cluster2.c
  assert(clset);                /* check the function argument */
  type = clset->type;           /* get the cluster type flags */
  lrc  = clset->lrates[0];      /* get the center learning rate */
  lrv  = clset->lrates[1];      /* and the radius learning rate */
  clset->steps++;               /* count the update step */
  for (p = clset->cls +(n = clset->clscnt); --n >= 0; ) {
    s = (--p)->sum; c = p->ctr; /* get aggregation vector and center */
    for (i = clset->incnt; --i >= 0; )
      s[i] = c[i] -lrc *s[i];   /* compute new center coordinates */
    if (type & CLS_COVARS) {    /* -- if adaptable covariances */
      mat_trmuls(p->smp, p->smp, MAT_UPPER, -lrv);
      mat_addx  (p->smp, p->smp, 1, p->inv, MAT_UPPER);
      _decom(p); mat_chinv(p->smp, p->inv);
      d = _decom(p);            /* update the covariance matrix */
      p->msd = ((d >= MINDET) && (d <= MAXDET))
             ? pow(d, 1.0/clset->incnt)
             : exp(mat_chlogd(p->inv) /clset->incnt); }
    else if (type & CLS_VARS) { /* -- if adaptable variances */
      for (d = 1, i = clset->incnt; --i >= 0; ) {
        t = mat_get(p->inv, i, i) -lrv *mat_get(p->smp, i, i);
        t = (t > 0) ? 1/t : MAXVAR;
        if      (t < MINVAR) t = MINVAR;
        else if (t > MAXVAR) t = MAXVAR;
        d *= mat_set(p->smp, i, i, t);
      }                         /* update the covariance matrix */
      p->msd = ((d >= MINDET) && (d <= MAXDET))
             ? pow(d, 1.0/clset->incnt)
             : exp(mat_dialog(p->smp) /clset->incnt); }
    else if (type & CLS_SIZE)   /* -- if adaptable isotropic var. */
      p->msd = p->var -lrv *mat_get(p->smp, 0, 0);
    else p->msd = p->var;       /* copy the isotropic variance */
    if      (p->msd < MINVAR) p->msd = MINVAR;
    else if (p->msd > MAXVAR) p->msd = MAXVAR;
  }                             /* compute new isotropic variance */
}  /* _backprop() */

/*----------------------------------------------------------------------
  Regularization Functions
----------------------------------------------------------------------*/

static void _regshape (CLSET *clset)
{                               /* --- regularize cluster shapes */
  int     i, k;                 /* loop variables */
  int     type;                 /* cluster type flags */
  CLUSTER *p;                   /* to traverse the clusters */
  double  t, a, b;              /* temporary buffers */
  double  min, max;             /* minimum and maximum eigenvalue */

  assert(clset);                /* check the function argument */
  type = clset->type;           /* get cluster type flags */
  a    = clset->regps[3];       /* and the regularization parameter */
  if (!(type & (CLS_COVARS|CLS_VARS))
  ||  (clset->incnt <= 1)       /* check whether the shape is fixed, */
  ||  ((a <= 0) && (a >= -1)))  /* the data space is one-dimensional, */
    return;                     /* or no regularization is to be done */
  for (p = clset->cls +(i = clset->clscnt); --i >= 0; ) {
    if ((--p)->d2 < 0) continue;/* traverse clusters to be updated */
    b = clset->regps[3];        /* get the regularization parameter */
    if (b > 0)                  /* if standard version */
      a = b*b *p->msd;          /* compute offset \sigma^2 * h^2 */
    else {                      /* if alternative version */
      if (type & CLS_COVARS) {  /* if adaptable covariances */
        mat_3dred(clset->buf, clset->buf +clset->incnt,
                  clset->mat, p->smp, MAT_UPPER);
        mat_3dqli(clset->buf, NULL, clset->buf,
                  clset->buf +clset->incnt, clset->incnt, 256);
      }                         /* compute the eigenvalues */
      min = DBL_MAX; max = 0;   /* initialize the eigenvalue range */
      for (k = clset->incnt; --k >= 0; ) {
        t = (type & CLS_COVARS) ? clset->buf[k] : mat_get(p->smp, k, k);
        if (t < min) min = t;   /* traverse the eigenvalues and */
        if (t > max) max = t;   /* find their minimum and maximum */
      }                         /* for the ratio computation */
      a = b*b; t = max -a *min; /* compute numerator of fraction */
      if (t <= 0) continue;     /* and check against maximum ratio */
      a = t /(a -1);            /* compute the regularization value */
    }                           /* to ensure the maximum ratio */
    mat_diaadds(p->smp, a);     /* and add it to the diagonal */
    if (type & CLS_COVARS) {    /* if adaptable covariances */
      mat_chdecom(p->inv, p->smp);
      t = mat_chdet(p->inv);    /* decompose the covariance matrix */
      t = ((t >= MINDET) && (t <= MAXDET))
        ? pow(t, 1.0/clset->incnt)
        : exp(mat_chlogd(p->inv) /clset->incnt);
      if (t < MINVAR) t = MINVAR;
      if (t > MAXVAR) t = MAXVAR;
      mat_trmuls(p->smp, p->smp, MAT_UPPER, p->msd /t);
      mat_chdecom(p->inv, p->smp); }
    else if (type & CLS_VARS) { /* if adaptable variances */
      t = mat_diaprod(p->smp);  /* compute new determinant */
      t = ((t >= MINDET) && (t <= MAXDET))
        ? pow(t, 1.0/clset->incnt)
        : exp(mat_dialog(p->smp) /clset->incnt);
      if (t < MINVAR) t = MINVAR;
      if (t > MAXVAR) t = MAXVAR;
      mat_diamuls(p->smp, p->msd /t);
    }                           /* rescale the (co)variances */
  }
}  /* _regshape() */

/*--------------------------------------------------------------------*/

static void _regsize (CLSET *clset)
{                               /* --- regularize cluster sizes */
  int     i;                    /* loop variable */
  int     type;                 /* cluster type flags */
  CLUSTER *p;                   /* to traverse the clusters */
  double  s, t, a, b;           /* temporary buffers */
  double  min, max;             /* minimum and maximum size */

  assert(clset);                /* check the function argument */
  type = clset->type;           /* get cluster type flags, */
  s    = clset->regps[2];       /* the scaling factor, */
  a    = clset->regps[1] *0.5;  /* the variance exponent, and */
  b    = clset->regps[0];       /* the regularization offset */
  if (!(type & CLS_SIZE)        /* check whether the size is fixed */
  ||  (a <= 0) || (s == 0) || ((s == 1) && (b == 0)))
    return;                     /* or no regularization is to be done */

  /* --- prepare clusters not updated --- */
  for (p = clset->cls +(i = clset->clscnt); --i >= 0; ) {
    if ((--p)->d2 >= 0) continue;  /* traverse marked clusters */
    p->d2  = 0;                 /* mark the cluster for update */
    p->msd = p->var;            /* copy the isotropic variance */
    if (type & CLS_COVARS) {    /* copy the old covariances */
      mat_copy(p->smp, p->cov, MAT_UPPER);
      mat_chdecom(p->inv, p->smp); }
    else if (type & CLS_VARS)   /* copy the old variances */
      mat_copy(p->smp, p->cov, MAT_DIAG);
    else                        /* copy the old center vector */
      vec_copy(p->sum, p->ctr, clset->incnt);
  }                             /* (make new state identical to old) */

  /* --- regularize the cluster size --- */
  if (b < -1) {                 /* if alternative version */
    min = DBL_MAX; max = 0;     /* initialize the size range */
    for (p += i = clset->clscnt; --i >= 0; ) {
      t = pow((--p)->msd, a);   /* compute the cluster sizes */
      if (t < min) min = t;     /* and determine their minimum */
      if (t > max) max = t;     /* and their maximum for the */
    }                           /* size ratio computation */
    t = max +b *min;            /* compute numerator of fraction */
    if (t <= 0) return;         /* check against the maximum ratio */
    b = t /(-b -1);             /* compute the regularization offset */
  }                             /* to ensure the maximum ratio */
  if (clset->regps[2] > 0) {    /* if to compute full-fledged version */
    if (b >= 0) {               /* if to equalize the sizes partially */
      for (s = 0, p += i = clset->clscnt; --i >= 0; ) {
        s += t = pow((--p)->msd, a);
        p->d2 = t +b;           /* sum the cluster sizes and */
      }                         /* note the increased cluster sizes */
      s /= s +clset->clscnt *b; }  /* compute the renormalization factor */
    else {                      /* if to equalize the sizes fully */
      for (s = 0, p += i = clset->clscnt; --i >= 0; ) {
        (--p)->d2 = 1; s += pow(p->msd, a); }
      s /= clset->clscnt;       /* sum the cluster sizes and */
    }                           /* compute the normalization factor */
    s *= clset->regps[2]; }     /* multiply with the scaling factor */
  else {                        /* if to compute simplified version */
    if (b < 0) return;          /* check for non-negative offset */
    for (p += i = clset->clscnt; --i >= 0; ) {
      --p; p->d2 = pow(p->msd, a) +b; }
    s = -clset->regps[2];       /* compute the new cluster sizes */
  }                             /* and get the size scaling factor */
  b = 1/a;                      /* get the inverse variance exponent */
  for (p += i = clset->clscnt; --i >= 0; ) {
    t = pow(s *(--p)->d2, b);   /* compute the new cluster size */
    a = t/p->msd; p->msd = t;   /* get the scaling factor and store */
    if (type & CLS_COVARS)      /* if adaptable covariances */
      mat_trmuls(p->smp, p->smp, MAT_UPPER, a);
    else if (type & CLS_VARS)   /* if adaptable variances */
      mat_diamuls(p->smp, a);   /* scale (co)variances to new size */
  }                             /* (that is, new determinant) */
}  /* _regsize() */

/*--------------------------------------------------------------------*/

static void _regweight (CLSET *clset)
{                               /* --- regularize cluster weights */
  int     i;                    /* loop variable */
  CLUSTER *p;                   /* to traverse the clusters */
  double  min, max;             /* minimum and maximum weight */
  double  t, a;                 /* regularization parameter, buffer */

  assert(clset);                /* check the function argument */
  a = clset->regps[4];          /* get the weight reg. parameter */
  if (!(clset->type & CLS_WEIGHT)
  ||  ((a <= 0) && (a >= -1)))  /* check whether the weight is fixed */
    return;                     /* or no regularization is to be done */
  min = DBL_MAX; max = 0;       /* initialize sum and weight range */
  for (p = clset->cls +(i = clset->clscnt); --i >= 0; ) {
    if ((--p)->nw < min) min = p->nw;
    if (    p ->nw > max) max = p->nw;
  }                             /* determine the weight range */
  if (a < 0) {                  /* if alternative reg. version, */
    a = -a; t = max -a *min;    /* compute numerator of fraction */
    a = (t > 0) ? t /(a-1) : 0; /* and check against maximum ratio */
  }
  t = 1 /(1 +a *clset->clscnt); /* compute the normalization factor */
  for (p += i = clset->clscnt; --i >= 0; ) {
    --p; p->nw = t *(p->nw+a); }/* compute new cluster weights */
}  /* _regweight() */

/*----------------------------------------------------------------------
  Parameter Update Functions
----------------------------------------------------------------------*/

static double _standard (CLSET *clset, double grd, double *prv, double *chg)
{                               /* --- standard update */
  return grd;
}  /* _standard() */

/*--------------------------------------------------------------------*/

static double _expand (CLSET *clset, double grd, double *prv, double *chg)
{                               /* --- update expanded by a factor */
  return clset->growth *grd;
}  /* _expand() */

/*--------------------------------------------------------------------*/

static double _momentum (CLSET *clset, double grd, double *prv, double *chg)
{                               /* --- update with momentum term */
  return *chg = grd +*chg *clset->moment;
}  /* _momentum() */

/*--------------------------------------------------------------------*/

static double _adaptive (CLSET *clset, double grd, double *prv, double *chg)
{                               /* --- self-adaptive learning rate */
  double t;                     /* temporary buffer */

  if      (grd > 0) t =  *prv;  /* check the directions */
  else if (grd < 0) t = -*prv;  /* of the changes in this */
  else              t = 0;      /* and the preceding step */
  if (t > 0) {                  /* if gradients have the same sign */
    *chg *= clset->growth;      /* increase the learning rate */
    if (*chg > clset->maxchg) *chg = clset->maxchg;
    *prv = grd; }               /* note the current gradient */
  else if (t < 0) {             /* if gradients have opposite signs */
    *chg *= clset->shrink;      /* decrease the learning rate */
    if (*chg < 1) *chg = 1;     /* the minimum learning rate is 1 */
    *prv = 0; }                 /* suppress a change in the next step */
  else {                        /* if one gradient is zero */
    *prv = grd; }               /* only note the current gradient */
  return *chg *grd;             /* return the parameter change */
}  /* _adaptive() */

/*--------------------------------------------------------------------*/

static double _resilient (CLSET *clset, double grd, double *prv, double *chg)
{                               /* --- resilient backpropagation */
  double t;                     /* temporary buffer */

  if (*chg == 0) {              /* if no step has been carried out, */
    *chg = fabs(grd); return *prv = grd; }   /* initialize the change */
  if      (grd > 0) t =  *prv;  /* check the directions */
  else if (grd < 0) t = -*prv;  /* of the changes in this */
  else              t = 0;      /* and the preceding step */
  if (t > 0) {                  /* if gradients have the same sign */
    *chg *= clset->growth;      /* increase the learning rate */
    if (*chg > clset->maxchg) *chg = clset->maxchg;
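
/* The stand-alone sketch below is NOT part of cluster2.c. It only
   illustrates, under assumed names, how the self-adaptive learning
   rate scheme of _adaptive() above behaves: the rate grows by a
   factor while successive gradients keep the same sign, shrinks
   (but never below 1) when the sign flips, and the returned value
   is rate * gradient. The LRATE struct is a hypothetical stub that
   holds only the three fields this scheme touches (growth, shrink,
   maxchg); the real CLSET structure is defined elsewhere. */
#include <stdio.h>

typedef struct {                /* hypothetical stub, not the real CLSET */
  double growth;                /* rate growth factor (> 1) */
  double shrink;                /* rate shrink factor (< 1) */
  double maxchg;                /* upper bound on the learning rate */
} LRATE;

static double adaptive (LRATE *lr, double grd, double *prv, double *chg)
{                               /* mirrors the logic of _adaptive() */
  double t;                     /* sign comparison buffer */
  if      (grd > 0) t =  *prv;  /* compare the direction of this */
  else if (grd < 0) t = -*prv;  /* gradient with the previous one */
  else              t = 0;
  if (t > 0) {                  /* same sign: increase the rate */
    *chg *= lr->growth;
    if (*chg > lr->maxchg) *chg = lr->maxchg;
    *prv = grd; }
  else if (t < 0) {             /* opposite sign: decrease the rate */
    *chg *= lr->shrink;
    if (*chg < 1) *chg = 1;     /* the minimum learning rate is 1 */
    *prv = 0; }                 /* suppress adaption in the next step */
  else *prv = grd;              /* one gradient was zero */
  return *chg *grd;             /* scaled parameter change */
}

int main (void)
{                               /* feed a short gradient sequence */
  LRATE  lr  = { 1.2, 0.5, 10.0 };
  double prv = 0, chg = 1;
  double g[] = { 0.5, 0.4, 0.6, -0.3, 0.2 };
  int    i;
  for (i = 0; i < 5; i++)       /* print the resulting changes */
    printf("grad %+.2f -> change %+.4f (rate %.3f)\n",
           g[i], adaptive(&lr, g[i], &prv, &chg), chg);
  return 0;
}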