
cluster2.c

Source code of a data mining clustering algorithm: neural-network-style
parameter update functions (momentum, self-adaptive learning rate,
resilient and quickprop variants) and shape regularization for cluster
centers and (co)variances.

Language: C
Page 1 of 4
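All update rules on this page share one calling convention: they receive the current gradient grd, the previous gradient prv, and a persistent per-parameter state *chg (a step size, momentum term, or learning rate), and return the parameter change. The sketch below shows the presumed shape of the UPDATEFN type; it is an inference from the function definitions and the _updatefn[] dispatch table, not a verbatim quote from cluster2.c, whose real typedef and CLSET structure live elsewhere in the file.

/* Presumed calling convention of the update rules (an inference, */
/* not a quote from cluster2.c):                                  */
typedef struct clset CLSET;     /* opaque here; fields not needed */
typedef double UPDATEFN (CLSET *clset,  /* cluster set parameters */
                         double grd,    /* current gradient       */
                         double prv,    /* previous gradient      */
                         double *chg);  /* persistent change state */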
      t = v +lrv *mat_get(c->cov, 0, 0);
      mat_set(c->cov, 0, 0, (t > 0) ? t : v);
    }                           /* compute new (co)variances */
  }                             /* or new isotropic variance */
  clset->steps++;               /* count the update step */
}  /* _backprop() */

/*----------------------------------------------------------------------
  Neural Network Update Functions
----------------------------------------------------------------------*/

static double _standard (CLSET *clset,
                         double grd, double prv, double *chg)
{                               /* --- standard update */
  return grd;                   /* (this function is actually unused) */
}  /* _standard() */

/*--------------------------------------------------------------------*/

static double _expand (CLSET *clset,
                       double grd, double prv, double *chg)
{                               /* --- update expanded by a factor */
  return clset->growth *grd;
}  /* _expand() */

/*--------------------------------------------------------------------*/

static double _momentum (CLSET *clset,
                         double grd, double prv, double *chg)
{                               /* --- update with momentum term */
  double g, d;                  /* temporary buffers */

  *chg = grd +*chg *clset->moment; /* compute the parameter change */
  if (clset->bkprop)            /* if backpropagation (RBF network), */
    return *chg;                /* return the computed change */
  g = fabs(grd); d = fabs(*chg);
  if (d < g) return *chg = grd; /* update at least with the gradient */
  g *= clset->maxchg;           /* constrain the parameter change */
  if (d > g) return *chg = grd *clset->maxchg;
  return *chg;                  /* return the parameter change */
}  /* _momentum() */

/*--------------------------------------------------------------------*/

static double _adaptive (CLSET *clset,
                         double grd, double prv, double *chg)
{                               /* --- self-adaptive learning rate */
  prv *= grd;                   /* compute relative change directions */
  if      (prv < 0) {           /* if gradients have opposite signs */
    *chg *= clset->shrink;      /* decrease the learning rate */
    if (*chg < clset->minchg) *chg = clset->minchg; }
  else if (prv > 0) {           /* if gradients have the same sign */
    *chg *= clset->growth;      /* increase the learning rate */
    if (*chg > clset->maxchg) *chg = clset->maxchg;
  }                             /* clamp to learning rate range */
  return *chg *grd;             /* return the parameter change */
}  /* _adaptive() */

/*--------------------------------------------------------------------*/

static double _resilient (CLSET *clset,
                          double grd, double prv, double *chg)
{                               /* --- resilient backpropagation */
  double g;                     /* temporary buffer */

  if (*chg == 0) {              /* if no step has been carried out, */
    *chg = fabs(grd); return grd; }        /* initialize the change */
  prv *= grd;                   /* compute relative change directions */
  if      (prv > 0)             /* if gradients have the same sign, */
    *chg *= clset->growth;      /* increase the change value */
  else if (prv < 0)             /* if gradients have opposite signs, */
    *chg *= clset->shrink;      /* decrease the change value */
  if (clset->bkprop) {          /* if backpropagation (RBF network) */
    if (*chg < clset->minchg) *chg = clset->minchg;
    if (*chg > clset->maxchg) *chg = clset->maxchg; }
  else {                        /* if clustering update */
    g = fabs(grd);              /* update at least with the gradient */
    if (*chg < g) *chg = g;     /* (do at least standard update) */
    g *= clset->maxchg;         /* restrict the parameter change */
    if (*chg > g) *chg = g;     /* to maxchg times the gradient */
  }
  return (grd < 0) ? -*chg : *chg; /* return the parameter change */
}  /* _resilient() */

/*--------------------------------------------------------------------*/

static double _quick (CLSET *clset,
                      double grd, double prv, double *chg)
{                               /* --- quickprop analog */
  double g, d;                  /* temporary buffers */

  if (prv == 0)                 /* if this is the first update, */
    return *chg = grd;          /* do a standard update step */
  d = prv -grd;                 /* compute the gradient change */
  if (*chg *d <= 0)             /* if the parabola opens downwards, */
    return *chg = grd;          /* do a standard update step */
  g = prv *clset->maxfac;       /* compute limit for new gradient */
  if (prv > 0) {                /* if previous gradient was positive */
    if (grd < g)                /* compute the factor for a jump */
      *chg *= grd /d;           /* to the minimum (apex of parabola) */
    else                        /* if the growth factor would become */
      *chg *= clset->growth; }  /* too large, use the maximal factor */
  else {                        /* if previous gradient was negative */
    if (grd > g)                /* compute the factor for a jump */
      *chg *= grd /d;           /* to the minimum (apex of parabola) */
    else                        /* if the growth factor would become */
      *chg *= clset->growth;    /* too large, use the maximal factor */
  }                             /* (avoid jumps that are too large) */
  d = fabs(*chg);               /* get the absolute change */
  if (clset->bkprop) {          /* if backpropagation (RBF network) */
    if (d < clset->minchg)      /* update at least with min. change */
      *chg = (grd < 0) ? -clset->minchg : clset->minchg;
    if (d > clset->maxchg)      /* update at most  with max. change */
      *chg = (grd < 0) ? -clset->maxchg : clset->maxchg; }
  else {                        /* if clustering update */
    g = fabs(grd);              /* update at least with the gradient */
    if (d < g) return *chg = grd;
    g *= clset->maxchg;         /* at most with maxchg x gradient */
    if (d > g) return *chg = grd *clset->maxchg;
  }                             /* (constrain the parameter change) */
  return *chg;                  /* return the parameter change */
}  /* _quick() */

/*--------------------------------------------------------------------*/

static UPDATEFN *_updatefn[] = {
  /* CLS_NONE       0x00 */  _standard,
  /* CLS_EXPAND     0x10 */  _expand,
  /* CLS_MOMENTUM   0x20 */  _momentum,
  /* CLS_ADAPTIVE   0x30 */  _adaptive,
  /* CLS_RESILIENT  0x40 */  _resilient,
  /* CLS_QUICK      0x50 */  _quick,
};                              /* list of update functions */

/*--------------------------------------------------------------------*/

static void _neural (CLSET *clset, int eigen)
{                               /* --- neural network inspired update */
  int      i, k, n, m;          /* loop variables, buffers */
  int      adapt;               /* flag for adaptive learning rate */
  CLUSTER  *c;                  /* to traverse the clusters */
  double   *s, *z, *x, *b;      /* to access the vectors */
  double   v, w, d, g, p;       /* temporary buffers */
  UPDATEFN *update;             /* parameter update function */

  assert(clset                  /* check the function argument */
  &&    (clset->method & CLS_MODIFIER));
  m = clset->method & CLS_MODIFIER;
  adapt  = (m == CLS_ADAPTIVE); /* get the adaptive update flag */
  update = _updatefn[m >> 4];   /* and the parameter update function */

  /* --- update the cluster centers --- */
  for (c = clset->cls +(i = clset->clscnt); --i >= 0; ) {
    --c;                        /* traverse the clusters */
    s = c->ctr; z = c->sum; x = c->chc; b = c->grc;
    for (k = clset->incnt; --k >= 0; ) {
      d    = s[k] -z[k];        /* traverse the dimensions */
      s[k] = z[k] +update(clset, d, b[k], x+k);
      b[k] = d;                 /* compute the current gradient, */
    }                           /* update the center coordinate, */
  }                             /* and store the current gradient */
  if (!(clset->type   & (CLS_COVARS|CLS_VARS|CLS_SIZE))
  ||  !(clset->method &  CLS_MODVAR))
    return;                     /* check for (co)variance adaptation */

  /* --- update only variances/isotropic variance --- */
  if (!(clset->type & CLS_COVARS)) {
    m = (clset->type & CLS_VARS)  ? clset->incnt : 1;
    n = (clset->type & CLS_JOINT) ? 1 : clset->clscnt;
    for (c += n; --n >= 0; ) {  /* traverse the relevant clusters */
      --c;                      /* (either all or just the first) */
      for (i = m; --i >= 0; ) { /* traverse the variances */
        v = mat_get(c->smp, i, i);  /* get the old and the */
        w = mat_get(c->cov, i, i);  /* new value of a variance, */
        g = mat_get(c->grv, i, i);  /* the previous gradient */
        p = mat_get(c->chv, i, i);  /* and the previous change */
        v += update(clset, d = w-v, g, &p);
        if (v < MINVAR) {       /* compute the new variance */
          v = w; p = (adapt) ? 1 : d; }
        mat_set(c->cov, i, i, v);   /* store the new variance, */
        mat_set(c->grv, i, i, d);   /* its (unmodified) gradient */
        mat_set(c->chv, i, i, p);   /* and the updated parameter */
      }                         /* (the parameter has been reset */
    } }                         /* if the variance got negative) */

  /* --- update full covariance matrix --- */
  else {                        /* if there are covariances */
    n = (clset->type & CLS_JOINT) ? 1 : clset->clscnt;
    for (c += n; --n >= 0; ) {  /* traverse the relevant clusters */
      --c;                      /* (either all or just the first) */
      for (i = clset->incnt; --i >= 0; ) {
        for (k = clset->incnt; --k >= i; ) {
          v = mat_get(c->smp, i, k);    /* get a (co)variance */
          d = mat_get(c->cov, i, k) -v; /* and its change */
          g = mat_get(c->grv, i, k);    /* get previous gradient */
          p = mat_get(c->chv, i, k);    /* and previous change */
          v += update(clset, d, g, &p); /* update the (co)variance */
          mat_set(c->mat, i, k, v);     /* store the value of the */
          mat_set(c->grv, i, k, d);     /* covariance matrix element */
          mat_set(c->chv, i, k, p);     /* and the updated parameter */
        }                       /* (compute a new covariance matrix */
      }                         /* in the matrix buffer) */
      if (!eigen)               /* do Cholesky decomposition */
        k = mat_chdecom(c->inv, c->mat);
      else {                    /* do eigenvalue decomposition */
        mat_3dred(c->buf, c->buf +n, c->inv, c->mat, MAT_UPPER);
        k = mat_3dqli(c->dif, c->inv, c->buf, c->buf +n, n, 256);
        if (k == 0) {           /* if successful decomposition */
          mat_evsort(c->dif, c->inv, 1);
          k = (c->dif[0] < MINVAR) ? -1 : 0;
        }                       /* check for positive eigenvalues */
      }                         /* (positive definite matrix) */
      if (k == 0) {             /* if successful decomposition */
        mat_copyx(c->cov, c->mat, MAT_UPPER);
        c->dec = -1; }          /* copy the update result */
      else {                    /* if decomposition failed */
        if (!adapt) mat_addx(c->chv, c->cov, -1, c->smp, MAT_UPPER);
        else { p = 1; mat_init(c->chv, MAT_UPPER|MAT_VALUE, &p); }
      }                         /* on failure reinitialize */
    }                           /* the additional parameter */
  }                             /* (reset to consistent state) */
  /* If the update of the covariance matrix fails, the originally */
  /* computed covariance matrix is kept (standard update step).   */
}  /* _neural() */

/*----------------------------------------------------------------------
  Regularization Functions
----------------------------------------------------------------------*/

static void _regshape (CLSET *clset)
{                               /* --- regularize cluster shapes */
  int     i, k, n;              /* loop variables, buffer */
  CLUSTER *c;                   /* to traverse the clusters */
  double  t, h, s, a;           /* temporary buffers */
  double  min, max;             /* minimum and maximum eigenvalue */

  assert(clset                  /* check the function argument */
     && (clset->incnt > 1)      /* and the regularization need */
     && (clset->type & (CLS_COVARS|CLS_VARS))
     && (clset->eigen || !(clset->type & CLS_COVARS))
     && ((clset->fwexp <= 0) || (clset->fwexp >= 1)));
  h = clset->regps[3]; s = h*h; /* get the regularization parameter */
  assert((h < -1) || (h > 0));  /* and check for the active range */
  n = clset->incnt;             /* get the number of dimensions */
  i = (clset->type & CLS_JOINT) ? 1 : clset->clscnt;
  for (c = clset->cls +i; --i >= 0; ) {
    --c;                        /* traverse the clusters */
    if (h > 0)                  /* if to apply standard version */
      a = s *c->var;            /* compute offset h^2 * \sigma^2 */
    else {                      /* if to apply alternative version */
      min = DBL_MAX; max = 0;   /* initialize the eigenvalue range */
      for (k = n; --k >= 0; ) { /* traverse the eigenvalues */
        t = (clset->type & CLS_COVARS)
          ? c->dif[k] : mat_get(c->cov, k, k);
        if (t < min) min = t;   /* find the minimum and maximum */
        if (t > max) max = t;   /* of the eigenvalues (axes lengths) */
      }                         /* for the ratio computation */
      t = max -s *min;          /* compute numerator of fraction */
/* (listing truncated here; continues on page 2 of 4) */
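The resilient scheme implemented by _resilient() above can be demonstrated in isolation: grow the step while the gradient keeps its sign, shrink it on a sign flip, and always step against the sign of the gradient. The standalone sketch below applies that rule to an assumed toy objective f(w) = (w-3)^2; the factors 1.2 and 0.5 and all names in it are illustrative only, not values taken from cluster2.c.

/* Standalone demo of the resilient (Rprop-style) update idea; */
/* NOT part of cluster2.c.  Toy objective: f(w) = (w-3)^2.     */
#include <stdio.h>

int main (void)
{
  double w   = 10.0;            /* parameter to optimize */
  double chg =  0.5;            /* current step size */
  double prv =  0.0;            /* previous gradient */
  int    t;                     /* iteration counter */

  for (t = 0; t < 16; t++) {
    double grd = 2*(w-3);       /* gradient of f at w */
    if      (prv*grd > 0) chg *= 1.2;  /* same sign: grow step */
    else if (prv*grd < 0) chg *= 0.5;  /* sign flip: shrink step */
    if      (grd > 0) w -= chg; /* step against the gradient, */
    else if (grd < 0) w += chg; /* using only its sign */
    prv = grd;                  /* remember gradient for next step */
    printf("t=%2d  w=%9.6f  step=%8.6f\n", t, w, chg);
  }                             /* w approaches the minimum at 3 */
  return 0;
}

Because the step depends only on the gradient's sign, the iterate oscillates around the minimum while the step size decays. This is why _resilient() additionally clamps *chg to the range [minchg, maxchg] in backpropagation mode, and to between one and maxchg times the gradient magnitude in clustering mode.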
