
📄 hrec.c

📁 Implements the HMM recognition algorithm
💻 C
📖 Page 1 of 5
               dur=(frame-al->frame)*lat->framedur,
                  like=like-al->like;
               frame=al->frame;
            }
            i--;
            la->lAlign[i].state=al->state;
            la->lAlign[i].label=labid;
            la->lAlign[i].dur=dur;
            la->lAlign[i].like=like;
            like=al->like;
         }
         if (pr!=NULL) {
            if (path->prev!=NULL)
               dur=(pr->frame-path->prev->frame)*lat->framedur,
                  like=pr->like-path->prev->like;
            else
               dur=pr->frame*lat->framedur,
                  like=pr->like;
            i--;
            la->lAlign[i].state=-1;
            la->lAlign[i].label=labpr;
            la->lAlign[i].dur=dur;
            la->lAlign[i].like=like;
         }
         align=NULL;
      }   /* the alignment block above is not executed during plain recognition */
   }   /* end of the for(pth=&tmp;pth!=NULL;pth=pth->chain) loop */
}

/* Number/count nodes (in path->usage field) and count links */
static void MarkPaths(Path *path,int *nn,int *nl)
{
   NxtPath *pth;

   if (path->usage>=0) {
      path->usage=-(*nn)++;
      (*nl)++;
      if (path->prev) MarkPaths(path->prev,nn,nl);   /* taken in the current run */
      for (pth=path->chain;pth!=NULL;pth=pth->chain) {   /* not entered in the current run */
         (*nl)++;
         if (pth->prev) MarkPaths(pth->prev,nn,nl);
      }
   }
}

static Lattice *CreateLattice(MemHeap *heap,TokenSet *res,HTime framedur)
{
   Lattice *lat;
   RelToken *cur;
   Path path;
   WordPron pron;
   NxtPath rth[MAX_TOKS];
   int nn,nl,ln,i;
   NetNode node;

   pron.word=NULL;pron.pnum=0;pron.next=NULL;
   pron.outSym=NULL;pron.phones=NULL;pron.nphones=0;
   pron.prob=0.0;

   path.like=res->tok.like;
   path.lm=res->tok.lm;
   path.usage=0;
   path.align=res->tok.align;
   path.node=&node;
   path.node->tag=NULL;
   path.node->info.pron=&pron;
   path.frame=pri->frame;
   path.prev=res->tok.path;
   path.chain=NULL;
   if (res->n>1) {   /* currently res->n==0, so this block is skipped */
      path.chain=rth+1;
      for (i=1,cur=res->set+1;i<res->n;i++,cur++) {
         rth[i].like=res->tok.like+cur->like;
         rth[i].lm=cur->lm;
         rth[i].prev=cur->path;
         rth[i].chain=NULL;
         rth[i-1].chain=rth+i;
      }
   }
   nn=1;nl=0;ln=0;   /* currently path.usage==0 */
   /* Number/count nodes (in path->usage field) and count links */
   MarkPaths(&path,&nn,&nl);
   /* printf("\ncurrent nn=%d,nl=%d\n",nn,nl); */
   lat=NewLattice(heap,nn,nl);   /* nn = number of lattice nodes, nl = number of lattice arcs */
   lat->voc=pri->net->vocab;
   lat->lmscale=pri->scale;
   lat->wdpenalty=pri->wordpen;
   lat->prscale=pri->pscale;
   lat->framedur=framedur;
   lat->lnodes[0].time=0.0; lat->lnodes[0].word=NULL;
   lat->lnodes[0].tag=NULL;
   lat->lnodes[0].score=0.0;

   LatFromPaths(&path,&ln,lat);   /* build the lattice from the paths; currently ln==0 */
#ifdef SANITY
   if (ln!=nl)
      HError(8522,"CreateLattice: Size mismatch (nl (%d) != ln (%d))",nl,ln);
#endif
   return(lat);
}

static void qcksrtM(float *array,int l,int r,int M)
{
   int i,j;
   float x,tmp;

   if (l>=r || l>M || r<M) return;
   x=array[(l+r)/2];i=l-1;j=r+1;
   do {
      do i++; while (array[i]>x);
      do j--; while (array[j]<x);
      if (i<j) {
         tmp=array[i];array[i]=array[j];array[j]=tmp;
      }
   }
   while(i<j);
   if (j<M) qcksrtM(array,j+1,r,M);
   else qcksrtM(array,l,j,M);
}
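qcksrtM above is a partial quicksort: it only recurses into the half of the array that contains index M, so on return array[M] holds the value a full descending sort would place there, i.e. the (M+1)-th largest score. ProcessObservation, further down this file, uses exactly that value as the threshold for max-model pruning. The sketch below is not part of hrec.c; it is a standalone illustration that copies the routine under the hypothetical name qcksrtM_demo so it can be compiled on its own.

#include <stdio.h>

/* local copy of the routine above, renamed for a standalone build */
static void qcksrtM_demo(float *array,int l,int r,int M)
{
   int i,j;
   float x,tmp;

   if (l>=r || l>M || r<M) return;
   x=array[(l+r)/2]; i=l-1; j=r+1;
   do {
      do i++; while (array[i]>x);
      do j--; while (array[j]<x);
      if (i<j) { tmp=array[i]; array[i]=array[j]; array[j]=tmp; }
   } while (i<j);
   if (j<M) qcksrtM_demo(array,j+1,r,M);
   else     qcksrtM_demo(array,l,j,M);
}

int main(void)
{
   /* hypothetical per-instance max scores (log likelihoods) */
   float score[8] = { -12.0f, -3.5f, -7.2f, -1.0f, -9.9f, -2.8f, -5.0f, -4.1f };
   int maxBeam = 3;   /* keep roughly the 3 best instances */

   qcksrtM_demo(score, 0, 7, maxBeam);
   /* score[maxBeam] now holds the (maxBeam+1)-th largest score and can be
      used as a pruning threshold, as ProcessObservation does; prints -4.10 */
   printf("pruning threshold = %.2f\n", score[maxBeam]);
   return 0;
}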
/* EXPORT->InitVRecInfo: initialise ready for recognition */
VRecInfo *InitVRecInfo(PSetInfo *psi,int nToks,Boolean models,Boolean states)
{
   VRecInfo *vri;
   PreComp *pre;
   int i,n;
   char name[80];
   static int prid=0;

   vri=(VRecInfo*) New(&gcheap,sizeof(VRecInfo));
   sprintf(name,"VRI-%d Heap",prid++);
   CreateHeap(&vri->heap,name,MSTAK,1,1.0,1000,8000);
   pri=(PRecInfo*) New(&vri->heap,sizeof(PRecInfo));
   vri->pri=pri;
   vri->pri->prid=prid;
#ifdef SANITY
   /* executed at initialisation */
   pri->ipos=0;
   pri->start_inst=NULL;
   pri->pnlen = pri->pylen = 0;
   pri->anlen = pri->aylen = 0;
#endif
   /* Reset readable parameters */
   vri->maxBeam=0;
   vri->genBeam=-LZERO;
   vri->wordBeam=-LZERO;
   vri->nBeam=-LZERO;
   vri->tmBeam=LZERO;
   vri->pCollThresh=1024;
   vri->aCollThresh=1024;
   /* Set up private parameters */
   pri->qsn=0;pri->qsa=NULL;
   pri->psi=NULL;
   pri->net=NULL;
   pri->scale=1.0;
   pri->wordpen=0.0;   /* Could be in StartNetwork ?? */
   pri->states=states;pri->models=models;
   if (nToks<=1) pri->nToks=0;
   else if (nToks<=MAX_TOKS) pri->nToks=nToks;
   else pri->nToks=MAX_TOKS;
   /* SetUp heaps for recognition */
   /* Model set dependent */
   pri->psi=psi;
   /* pri->psi->sBuf[1].n=((pri->nToks>1)?1:0);  Needed every observation */
   for(i=1,pre=psi->sPre+1;i<=psi->nsp;i++,pre++) pre->id=-1;
   for(i=1,pre=psi->mPre+1;i<=psi->nmp;i++,pre++) pre->id=-1;
   pri->stHeap=(MemHeap *) New(&vri->heap,pri->psi->stHeapNum*sizeof(MemHeap));
   for (n=1;n<=pri->psi->max;n++) {
      if (pri->psi->stHeapIdx[n]>=0) {
         sprintf(name,"State Heap: numStates=%d",n);
         CreateHeap(pri->stHeap+pri->psi->stHeapIdx[n],name,
                    MHEAP,sizeof(TokenSet)*n,1.0,100,1600);
      }
   }
   /* nTok dependent */
   if (pri->nToks>1)
      CreateHeap(&pri->rTokHeap,"RelToken Heap",
                 MHEAP,sizeof(RelToken)*pri->nToks,1.0,200,1600);
   /* Non dependent */
   CreateHeap(&pri->instHeap,"NetInst Heap",
              MHEAP,sizeof(NetInst),1.0,200,1600);
   CreateHeap(&pri->rPthHeap,"NxtPath Heap",
              MHEAP,sizeof(NxtPath),1.0,200,1600);
   CreateHeap(&pri->pathHeap,"Path Heap",
              MHEAP,sizeof(Path),1.0,200,1600);
   CreateHeap(&pri->alignHeap,"Align Heap",
              MHEAP,sizeof(Align),1.0,200,3200);
   /* Now set up instances */
   pri->head.node=pri->tail.node=NULL;
   pri->head.state=pri->tail.state=NULL;
   pri->head.exit=pri->tail.exit=NULL;
   pri->head.wdlk=pri->tail.wdlk=LZERO;
   pri->head.max=pri->tail.max=LZERO;
   pri->head.knil=pri->tail.link=NULL;
   pri->head.link=&pri->tail; pri->tail.knil=&pri->head;
   pri->pYesRef.link=&pri->pYesTail; pri->pYesTail.knil=&pri->pYesRef;
   pri->pYesTail.link=pri->pYesRef.knil=NULL; pri->pYesTail.usage=-2;
   pri->pNoRef.link=&pri->pNoTail; pri->pNoTail.knil=&pri->pNoRef;
   pri->pNoTail.link=pri->pNoRef.knil=NULL; pri->pNoTail.usage=-2;
   pri->npth=pri->cpth=0;

   pri->aYesRef.link=&pri->aYesTail; pri->aYesTail.knil=&pri->aYesRef;
   pri->aYesTail.link=pri->aYesRef.knil=NULL; pri->aYesTail.usage=-2;
   pri->aNoRef.link=&pri->aNoTail; pri->aNoTail.knil=&pri->aNoRef;
   pri->aNoTail.link=pri->aNoRef.knil=NULL; pri->aNoTail.usage=-2;
   pri->nalign=pri->calign=0;
   return(vri);
}

/* EXPORT->DeleteVRecInfo: Finished with this recogniser */
void DeleteVRecInfo(VRecInfo *vri)
{
   PRecInfo *pri;
   int i;

   pri=vri->pri;
   for (i=0;i<pri->psi->stHeapNum;i++)
      DeleteHeap(pri->stHeap+i);
   if (pri->nToks>1)
      DeleteHeap(&pri->rTokHeap);
   DeleteHeap(&pri->instHeap);
   DeleteHeap(&pri->rPthHeap);
   DeleteHeap(&pri->pathHeap);
   DeleteHeap(&pri->alignHeap);
   DeleteHeap(&vri->heap);
   Dispose(&gcheap,vri);
}
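InitVRecInfo above wires the active-instance list (head/tail) and the path and alignment recycling lists (pYesRef/pYesTail, pNoRef/pNoTail, aYesRef/aYesTail, aNoRef/aNoTail) as doubly linked lists bracketed by permanent sentinel records, so later attach and detach operations never need to special-case an empty list or a NULL neighbour. The sketch below is not part of hrec.c; it shows the same link/knil sentinel pattern with a hypothetical DNode type.

#include <stdio.h>

/* hypothetical node: link points forward, knil points backward,
   mirroring the link/knil fields of NetInst, Path and Align */
typedef struct _DNode {
   struct _DNode *link;   /* next */
   struct _DNode *knil;   /* previous */
   int value;
} DNode;

/* permanent sentinels: head.link .. &tail always brackets the live nodes */
static DNode head, tail;

static void InitList(void)
{
   head.link = &tail; tail.knil = &head;
   head.knil = tail.link = NULL;
}

/* insert n immediately after the head sentinel - no empty-list special case */
static void PushFront(DNode *n)
{
   n->link = head.link; n->knil = &head;
   n->link->knil = n;   head.link = n;
}

/* unlink n - again no special cases thanks to the sentinels */
static void Detach(DNode *n)
{
   n->knil->link = n->link;
   n->link->knil = n->knil;
}

int main(void)
{
   DNode a = { NULL, NULL, 1 }, b = { NULL, NULL, 2 };
   DNode *p;

   InitList();
   PushFront(&a);
   PushFront(&b);
   Detach(&a);
   for (p = head.link; p != &tail; p = p->link)
      printf("%d\n", p->value);   /* prints 2 */
   return 0;
}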
/* EXPORT->BeginRecNet: initialise network ready for recognition */
void StartRecognition(VRecInfo *vri,Network *net,
                      float scale,LogFloat wordpen,float pscale)
{
   NetNode *node;
   NetInst *inst,*next;
   PreComp *pre;
   int i;

   /* vri is the VRecInfo* (visible recognition information);
      pri is the PRecInfo* (private recognition information) */
   pri=vri->pri;
   if (pri==NULL)
      HError(8570,"StartRecognition: Visible recognition info not initialised");
   /* pri->psi->sBuf[1].n=((pri->nToks>1)?1:0);  Only needed for Step1 */
   vri->noTokenSurvived=TRUE;
   pri->net=net;
   pri->scale=scale;
   pri->wordpen=wordpen;
   pri->pscale=pscale;
   /* Initialise the network and instances ready for first frame */
   /* printf("\ncurrent pri->psi->nsp=%d\n",pri->psi->nsp); */
   /* clear the instance of every node in the network chain
      (word-internal nodes included) */
   for (node=pri->net->chain;node!=NULL;node=node->chain) node->inst=NULL;
   /* clear the instances of the network's initial and final nodes */
   pri->net->final.inst=pri->net->initial.inst=NULL;
   for(i=1,pre=pri->psi->sPre+1;i<=pri->psi->nsp;i++,pre++) pre->id=-1;
   for(i=1,pre=pri->psi->mPre+1;i<=pri->psi->nmp;i++,pre++) pre->id=-1;
   pri->tact=pri->nact=pri->frame=0;
   /* if(pri->net->initial.info.pron!=NULL) printf("\nexecuted\n"); */
   /* attach a NetInst to the network's initial node and add it, together with
      the instances of the nodes it links to, to the recogniser's instance list */
   AttachInst(&pri->net->initial);
   inst=pri->net->initial.inst;
   inst->state->tok.like=inst->max=0.0;
   inst->state->tok.lm=0.0;
   inst->state->tok.path=NULL;
   inst->state->n=((pri->nToks>1)?1:0);
   vri->genMaxNode=vri->wordMaxNode=NULL;
   vri->genMaxTok=vri->wordMaxTok=null_token;
   pri->wordThresh=pri->genThresh=pri->nThresh=LSMALL;
   pri->genMaxNode=pri->wordMaxNode=NULL;
   pri->genMaxTok=pri->wordMaxTok=null_token;
   for (inst=pri->head.link;inst!=NULL && inst->node!=NULL;inst=next)
      if (inst->max<pri->genThresh) {
         /* the instance's best likelihood fell below the global threshold,
            so detach its node from the network (false in the current run) */
         next=inst->link;
         DetachInst(inst->node);
      }
      else {
         /* branch taken in the current run */
         /* if (inst->node->info.hmm!=NULL) printf("\nmodel node\n");
            if (inst->node->info.pron!=NULL) printf("\nword node\n"); */
         pri->nxtInst=inst;   /* remember the current instance */
         /* inst->node may be an HMM node or a word node; initialise the
            instance's tokens.  Each word node that is passed creates a new
            Path recording that node; if the old path is not yet on the
            pri->pYesRef list it is moved to the head of that list, and the
            token is then propagated to every following node */
         StepInst2(inst->node);
         next=pri->nxtInst->link;
      }
}
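Both InitVRecInfo and StartRecognition stamp every PreComp entry with pre->id=-1, and ProcessObservation (next) stamps each frame with pri->id=(pri->prid<<20)+pri->frame; a precomputed value is treated as valid only if its stored id matches the id of the current observation, so a single id change invalidates the whole cache without touching the cached numbers. The sketch below is not part of hrec.c; it illustrates that id-stamped caching pattern with hypothetical names (Cache, ComputeOutP, CachedOutP).

#include <stdio.h>

/* hypothetical cached value, mirroring the id field of PreComp */
typedef struct {
   int id;        /* id of the observation this value was computed for */
   double outp;   /* cached log output probability */
} Cache;

/* stands in for the expensive state output probability calculation */
static double ComputeOutP(double x)
{
   return -0.5 * x * x;
}

/* return the cached value if it was computed for this observation id,
   otherwise recompute and re-stamp the entry */
static double CachedOutP(Cache *c, int curId, double x)
{
   if (c->id != curId) {
      c->outp = ComputeOutP(x);
      c->id = curId;
   }
   return c->outp;
}

int main(void)
{
   Cache c = { -1, 0.0 };   /* -1 marks the entry stale, as in InitVRecInfo */
   int prid = 1, frame;

   for (frame = 1; frame <= 3; frame++) {
      int id = (prid << 20) + frame;        /* same stamping scheme as ProcessObservation */
      double v1 = CachedOutP(&c, id, 0.5);  /* computed for this frame */
      double v2 = CachedOutP(&c, id, 0.5);  /* served from the cache */
      printf("frame %d: %.3f %.3f\n", frame, v1, v2);
   }
   return 0;
}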
void ProcessObservation(VRecInfo *vri,Observation *obs,int id, AdaptXForm *xform)
{
   NetInst *inst,*next;
   int j;
   float thresh;

   pri=vri->pri;
   inXForm = xform; /* specifies the transform to use for this observation */
   if (pri==NULL)
      HError(8570,"ProcessObservation: Visible recognition info not initialised");
   if (pri->net==NULL)
      HError(8570,"ProcessObservation: Recognition not started");
   pri->psi->sBuf[1].n=((pri->nToks>1)?1:0); /* Needed every observation */
   pri->frame++;
   pri->obs=obs;
   if (id<0) pri->id=(pri->prid<<20)+pri->frame;   /* branch taken in the current run */
   else pri->id=id;
   if (obs->swidth[0]!=pri->psi->hset->swidth[0])
      HError(8571,"ProcessObservation: incompatible number of streams (%d vs %d)",
             obs->swidth[0],pri->psi->hset->swidth[0]);
   if (pri->psi->mixShared)
      for (j=1;j<=obs->swidth[0];j++)
         if (VectorSize(obs->fv[j])!=pri->psi->hset->swidth[j])
            HError(8571,"ProcessObservation: incompatible stream widths for %d (%d vs %d)",
                   j,VectorSize(obs->fv[j]),pri->psi->hset->swidth[j]);
   /* Max model pruning is done initially in a separate pass */
   if (vri->maxBeam>0 && pri->nact>vri->maxBeam) {   /* not executed in the current run */
      if (pri->nact>pri->qsn) {
         if (pri->qsn>0)
            Dispose(&vri->heap,pri->qsa);
         pri->qsn=(pri->nact*3)/2;
         pri->qsa=(LogFloat*) New(&vri->heap,pri->qsn*sizeof(LogFloat));
      }
      for (inst=pri->head.link,j=0;inst!=NULL;inst=inst->link,j++)
         pri->qsa[j]=inst->max;
      if (j>=vri->maxBeam) {
         qcksrtM(pri->qsa,0,j-1,vri->maxBeam);
         thresh=pri->qsa[vri->maxBeam];
         if (thresh>LSMALL)
            for (inst=pri->head.link;inst->link!=NULL;inst=next) {
               next=inst->link;
               if (inst->max<thresh)
                  DetachInst(inst->node);
            }
      }
   }   /* the block above is not executed in the current run */

   if (pri->psi->hset->hsKind==TIEDHS)   /* not executed in the current run */
      PrecomputeTMix(pri->psi->hset,obs,vri->tmBeam,0);
   /* Pass 1 must calculate top of all beams - inc word end !! */
   pri->genMaxTok=pri->wordMaxTok=null_token;
   pri->genMaxNode=pri->wordMaxNode=NULL;
   for (inst=pri->head.link,j=0;inst!=NULL;inst=inst->link,j++)
      if (inst->node)   /* executed in the current run */
         StepInst1(inst->node);   /* first pass of token propagation (internal) */
   /* Not changing beam width for max model pruning */
   pri->wordThresh=pri->wordMaxTok.like-vri->wordBeam;
   if (pri->wordThresh<LSMALL) pri->wordThresh=LSMALL;
   pri->genThresh=pri->genMaxTok.like-vri->genBeam;
   if (pri->genThresh<LSMALL) pri->genThresh=LSMALL;
   if (pri->nToks>1) {   /* not entered in the current run */
      pri->nThresh=pri->genMaxTok.like-vri->nBeam;
      if (pri->nThresh<LSMALL/2) pri->nThresh=LSMALL/2;
   }

   /* Pass 2 Performs external token propagation and pruning */
   for (inst=pri->head.link,j=0;inst!=NULL && inst->node!=NULL;inst=

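ProcessObservation derives its pruning thresholds from the pass-1 maxima: each threshold is the best score minus the corresponding beam width, floored at LSMALL so that a disabled beam (initialised to -LZERO in InitVRecInfo) effectively prunes nothing. The sketch below is not part of hrec.c; it reproduces that arithmetic with placeholder definitions for LZERO/LSMALL (HTK's own definitions live in HMath.h) and hypothetical scores.

#include <stdio.h>

/* placeholder constants standing in for HTK's log-domain limits */
#define LZERO  (-1.0E10)
#define LSMALL (-0.5E10)

typedef double LogFloat;

/* compute a pruning threshold the way ProcessObservation does:
   best score minus the beam width, floored at LSMALL */
static LogFloat BeamThresh(LogFloat best, LogFloat beam)
{
   LogFloat thresh = best - beam;
   return (thresh < LSMALL) ? LSMALL : thresh;
}

int main(void)
{
   LogFloat genMax  = -1234.5;   /* hypothetical best token score this frame */
   LogFloat genBeam = 250.0;     /* hypothetical general beam width */

   /* tokens scoring below this value are pruned in pass 2 */
   printf("genThresh = %.1f\n", BeamThresh(genMax, genBeam));

   /* a disabled beam (genBeam = -LZERO, as set by InitVRecInfo) gives a
      threshold floored at LSMALL, i.e. effectively no pruning */
   printf("genThresh (no beam) = %.1f\n", BeamThresh(genMax, -LZERO));
   return 0;
}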