
📄 discrime.c

📁 General Hidden Markov Model Library: a general-purpose C library for hidden Markov models
💻 C
📖 Page 1 of 3
/* body of discrime_print_statistics; its signature is missing from this excerpt */
{
#define CUR_PROC "discrime_print_statistics"
  int k, l, m;
  int argmax;
  double *logp, max;
  ghmm_dseq *sq;

  ARRAY_CALLOC (logp, noC);

  for (k = 0; k < noC; k++) {
    falseP[k] = 0;
    falseN[k] = 0;
  }

  for (k = 0; k < noC; k++) {
    sq = sqs[k];
    printf ("Looking at training tokens of Class %d\n", k);
    for (l = 0; l < sq->seq_number; l++) {
      argmax = 0, max = -DBL_MAX;
      for (m = 0; m < noC; m++) {
        ghmm_dmodel_logp (mo[m], sq->seq[l], sq->seq_len[l], &(logp[m]));
        if (m == 0 || max < logp[m]) {
          max = logp[m];
          argmax = m;
        }
      }
      if (sq->seq_number < 11 && noC < 8) {
        /* printing fancy statistics */
        printf ("%2d: %8.4g", l, logp[0] - logp[argmax]);
        for (m = 1; m < noC; m++)
          printf (",  %8.4g", logp[m] - logp[argmax]);
        printf ("  \t+(%g)\n", logp[argmax]);
      }
      /* counting false positives and false negatives */
      if (argmax != k) {
        falseP[argmax]++;
        falseN[k]++;
      }
    }
    printf ("%d false negatives in class %d.\n", falseN[k], k);
  }
STOP:     /* Label STOP from ARRAY_[CM]ALLOC */
  m_free (logp);
  return;
#undef CUR_PROC
}

/*----------------------------------------------------------------------------*/
static void discrime_trim_gradient (double *new, int length)
{
#define CUR_PROC "discrime_trim_gradient"
  int i, argmin = 0;
  double min, sum = 0;

/*   printf("unnormalised: %g", new[0]); */
/*   for (i=1; i<length; i++) */
/*     printf(", %g", new[i]); */
/*   printf("\n"); */

  for (i = 1; i < length; i++)
    if (new[i] < new[argmin])
      argmin = i;
  min = new[argmin];

  if (new[argmin] < 0.0)
    for (i = 0; i < length; i++)
      new[i] -= (1.1 * min);

  for (i = 0; i < length; i++)
    sum += new[i];

  for (i = 0; i < length; i++)
    new[i] /= sum;

/*   printf("  normalised: %g", new[0]); */
/*   for (i=1; i<length; i++) */
/*     printf(", %g", new[i]); */
/*   printf("\n"); */

  return;
#undef CUR_PROC
}

/*----------------------------------------------------------------------------*/
static void discrime_update_pi_gradient (ghmm_dmodel ** mo, ghmm_dseq ** sqs, int noC,
                                         int class, double ****expect_pi,
                                         long double **omega, long double ***omegati)
{
#define CUR_PROC "discrime_update_pi_gradient"
  int i, l, m;
  double *pi_old = NULL;
  double *pi_new = NULL;
  double sum;
  ghmm_dseq *sq = NULL;

  ARRAY_CALLOC (pi_old, mo[class]->N);
  ARRAY_CALLOC (pi_new, mo[class]->N);

  /* iterate over all states of the current model */
  for (i = 0; i < mo[class]->N; i++) {
    /* iterate over all classes */
    sum = 0.0;
    for (m = 0; m < noC; m++) {
      sq = sqs[m];
      /* iterate over all training sequences */
      if (m == class)
        for (l = 0; l < sq->seq_number; l++)
          sum += omega[class][l] * expect_pi[class][l][class][i];
      else
        for (l = 0; l < sq->seq_number; l++)
          sum -= omegati[m][l][class] * expect_pi[m][l][class][i];
    }
    /* check for valid new parameter */
    pi_old[i] = mo[class]->s[i].pi;
    if (fabs (pi_old[i]) == 0.0)
      pi_new[i] = pi_old[i];
    else
      pi_new[i] = pi_old[i] + discrime_lambda * (sum / pi_old[i]);
  }

  /* change parameters to fit into valid parameter range */
  discrime_trim_gradient (pi_new, mo[class]->N);

  /* update parameters */
  for (i = 0; i < mo[class]->N; i++) {
    /* printf("update parameter Pi in state %d of model %d with: %5.3g",
       i, class, pi_new[i]);
       printf(" (%5.3g) old value.\n", pi_old[i]); */
    mo[class]->s[i].pi = pi_new[i];
  }
STOP:     /* Label STOP from ARRAY_[CM]ALLOC */
  m_free (pi_old);
  m_free (pi_new);
  return;
#undef CUR_PROC
}

/*----------------------------------------------------------------------------*/
static void discrime_update_a_gradient (ghmm_dmodel ** mo, ghmm_dseq ** sqs, int noC,
                                        int class, double ****expect_a,
                                        long double **omega, long double ***omegati)
{
#define CUR_PROC "discrime_update_a_gradient"
  int i, j, g, l, m;
  int j_id;
  double *a_old = NULL;
  double *a_new = NULL;
  double sum;
  ghmm_dseq *sq = NULL;

  ARRAY_CALLOC (a_old, mo[class]->N);
  ARRAY_CALLOC (a_new, mo[class]->N);

  /* updating current class */
  /* iterate over all states of the current model */
  for (i = 0; i < mo[class]->N; i++) {
    for (j = 0; j < mo[class]->s[i].out_states; j++) {
      j_id = mo[class]->s[i].out_id[j];
      sum = 0.0;
      /* iterate over all classes and training sequences */
      for (m = 0; m < noC; m++) {
        sq = sqs[m];
        if (m == class)
          for (l = 0; l < sq->seq_number; l++)
            sum += omega[class][l] * expect_a[class][l][class][i * mo[class]->N + j_id];
        else
          for (l = 0; l < sq->seq_number; l++)
            sum -= omegati[m][l][class] * expect_a[m][l][class][i * mo[class]->N + j_id];
      }
      /* check for valid new parameter */
      a_old[j] = mo[class]->s[i].out_a[j];
      if (fabs (a_old[j]) == 0.0)
        a_new[j] = a_old[j];
      else
        a_new[j] = a_old[j] + discrime_lambda * (sum / a_old[j]);
    }

    /* change parameters to fit into valid parameter range */
    discrime_trim_gradient (a_new, mo[class]->s[i].out_states);

    /* update parameter */
    for (j = 0; j < mo[class]->s[i].out_states; j++) {
      /* printf("update parameter A[%d] in state %d of model %d with: %5.3g",
         j, i, class, a_new[j]);
         printf(" (%5.3g) old value.\n", a_old[j]); */
      mo[class]->s[i].out_a[j] = a_new[j];

      /* mirror out_a to corresponding in_a */
      j_id = mo[class]->s[i].out_id[j];
      for (g = 0; g < mo[class]->s[j_id].in_states; g++)
        if (i == mo[class]->s[j_id].in_id[g]) {
          mo[class]->s[j_id].in_a[g] = mo[class]->s[i].out_a[j];
          break;
        }
    }
  }
STOP:     /* Label STOP from ARRAY_[CM]ALLOC */
  m_free (a_old);
  m_free (a_new);
  return;
#undef CUR_PROC
}

/*----------------------------------------------------------------------------*/
static void discrime_update_b_gradient (ghmm_dmodel ** mo, ghmm_dseq ** sqs, int noC,
                                        int class, double *****expect_b,
                                        long double **omega, long double ***omegati)
{
#define CUR_PROC "discrime_update_b_gradient"
  int i, h, l, m;
  int hist, size;
  double *b_old = NULL;
  double *b_new = NULL;
  double sum;

  ARRAY_CALLOC (b_old, mo[class]->M);
  ARRAY_CALLOC (b_new, mo[class]->M);

  /* updating current class */
  /* iterate over all states of the current model */
  for (i = 0; i < mo[class]->N; i++) {
    if (mo[class]->s[i].fix)
      continue;
    size = ghmm_ipow (mo[class], mo[class]->M, mo[class]->order[i] + 1);
    for (hist = 0; hist < size; hist += mo[class]->M) {
      for (h = hist; h < hist + mo[class]->M; h++) {
        /* iterate over all classes and training sequences */
        sum = 0.0;
        for (m = 0; m < noC; m++) {
          if (m == class)
            for (l = 0; l < sqs[m]->seq_number; l++)
              sum += omega[class][l] * expect_b[class][l][class][i][h];
          else
            for (l = 0; l < sqs[m]->seq_number; l++)
              sum -= omegati[m][l][class] * expect_b[m][l][class][i][h];
        }
        /* check for valid new parameters */
        b_old[h] = mo[class]->s[i].b[h];
        if (fabs (b_old[h]) == 0.0)
          b_new[h] = b_old[h];
        else
          b_new[h] = b_old[h] + discrime_lambda * (sum / b_old[h]);
      }
      /* change parameters to fit into valid parameter range */
      discrime_trim_gradient (b_new, mo[0]->M);
      for (h = hist; h < hist + mo[class]->M; h++) {
        /* update parameters */
        /* printf("update parameter B[%d] in state %d of model %d with: %5.3g",
           h, i, class, b_new[h]);
           printf(" (%5.3g) old value.\n", b_old[h]); */
        mo[class]->s[i].b[h] = b_new[h];
      }
    }
  }
STOP:     /* Label STOP from ARRAY_[CM]ALLOC */
  m_free (b_old);
  m_free (b_new);
  return;
#undef CUR_PROC
}

/*----------------------------------------------------------------------------*/
static void discrime_update_pi_closed (ghmm_dmodel ** mo, ghmm_dseq ** sqs, int noC,
                                       int class, double lfactor,
                                       double ****expect_pi, long double **omega,
                                       long double ***omegati)
{
#define CUR_PROC "discrime_update_pi_closed"
  int i, l, m;
  double *pi_old = NULL;
  double *pi_new = NULL;
  double sum, lagrangian;
  ghmm_dseq *sq = NULL;

  ARRAY_CALLOC (pi_old, mo[class]->N);
  ARRAY_CALLOC (pi_new, mo[class]->N);

  /* updating current class (all or only specified) */
  /* iterate over all states of the k-th model to compute lagrangian multiplier */
  lagrangian = 0.0;
  for (i = 0; i < mo[class]->N; i++) {
    /* iterate over all classes */
    for (m = 0; m < noC; m++) {
      sq = sqs[m];
      /* if current class and class of training sequence match, add the first term */
      if (m == class)
        for (l = 0; l < sq->seq_number; l++)
          lagrangian -= omega[class][l] * expect_pi[class][l][class][i];
      /* otherwise the second */
      else
        for (l = 0; l < sq->seq_number; l++)
          lagrangian += lfactor * omegati[m][l][class] * expect_pi[m][l][class][i];
    }
  }

  for (i = 0; i < mo[class]->N; i++) {
    /* iterate over all classes */
    sum = 0.0;
    for (m = 0; m < noC; m++) {
      sq = sqs[m];
      /* iterate over all training sequences */
      if (m == class)
        for (l = 0; l < sq->seq_number; l++)
          sum -= omega[class][l] * expect_pi[class][l][class][i];
      else
        for (l = 0; l < sq->seq_number; l++)
          sum += lfactor * omegati[m][l][class] * expect_pi[m][l][class][i];
    }
    /* check for valid new parameter */
    pi_old[i] = mo[class]->s[i].pi;
    if (fabs (lagrangian) == 0.0)
      pi_new[i] = pi_old[i];
    else
      pi_new[i] = sum / lagrangian;
  }

  /* update parameters */
  for (i = 0; i < mo[class]->N; i++) {
    /* printf("update parameter Pi in state %d of model %d with: %5.3g",
       i, class, pi_new[i]);
       printf(" (%5.3g) old value.\n", pi_old[i]); */
    mo[class]->s[i].pi = TRIM (pi_old[i], pi_new[i]);
  }
STOP:     /* Label STOP from ARRAY_[CM]ALLOC */
  m_free (pi_old);
  m_free (pi_new);
  return;
#undef CUR_PROC
}

/*----------------------------------------------------------------------------*/
static void discrime_update_a_closed (ghmm_dmodel ** mo, ghmm_dseq ** sqs, int noC,
                                      int class, double lfactor, double ****expect_a,
                                      long double **omega, long double ***omegati)
{
#define CUR_PROC "discrime_update_a_closed"
  int i, j, g, l, m;
  int j_id;
  double *a_old = NULL;
  double *a_new = NULL;
  double sum, lagrangian;
  ghmm_dseq *sq = NULL;

  ARRAY_CALLOC (a_old, mo[class]->N);
  ARRAY_CALLOC (a_new, mo[class]->N);

  /* updating current class (all or only specified) */
  /* iterate over all states of the k-th model */
  for (i = 0; i < mo[class]->N; i++) {
    /* compute lagrangian multiplier */
    lagrangian = 0.0;
    for (j = 0; j < mo[class]->s[i].out_states; j++) {
      j_id = mo[class]->s[i].out_id[j];
      /* iterate over all classes and training sequences */
      for (m = 0; m < noC; m++) {
        sq = sqs[m];
        if (m == class)
          for (l = 0; l < sq->seq_number; l++)
            lagrangian -= omega[class][l] * expect_a[class][l][class][i * mo[class]->N + j_id];
        else
          for (l = 0; l < sq->seq_number; l++)
            lagrangian += lfactor * omegati[m][l][class] * expect_a[m][l][class][i * mo[class]->N + j_id];
      }
    }

    for (j = 0; j < mo[class]->s[i].out_states; j++) {
      j_id = mo[class]->s[i].out_id[j];
/* ... listing continues on page 2 of 3 ... */
