cluster2.c
  nst = clset->nst;             /* get the numerical statistics */
  switch (mode & 0xf) {         /* evaluate the initialization mode */

    case CLS_CENTER:            /* -- center of the data space */
      c = clset->cls->ctr;      /* compute the center */
      nst_center(nst, c);       /* of the data space */
      for (p = clset->cls +(k = clset->clscnt); --k > 0; )
        vec_copy((--p)->ctr, c, clset->incnt);
      break;                    /* copy the center to all clusters */

    case CLS_DIAG:              /* -- diagonal of the data space */
    case CLS_LATIN:             /* -- latin hypercube sampling */
      for (i = clset->incnt; --i >= 0; ) {
        m = nst_max(nst, i);    /* compute value decrement */
        d = (m -nst_min(nst, i)) /clset->clscnt;
        x = m -0.5*d;           /* compute last value */
        for (p = clset->cls +(k = clset->clscnt); --k >= 0; ) {
          (--p)->ctr[i] = x; x -= d; }
      }                         /* set equally spaced values */
      if (mode == CLS_DIAG)     /* if only to set the diagonal, */
        break;                  /* there is nothing else to be done */
    /* case CLS_LATIN: */       /* -- latin hypercube sampling */
      p = clset->cls;           /* shuffle elements of the centers */
      for (n = clset->clscnt; --n > 0; ) {
        for (i = clset->incnt; --i >= 0; ) {
          k = (int)((n+1) *randfn());
          if      (k > n) k = n;     /* compute a random index in */
          else if (k < 0) k = 0;     /* the remaining set of centers */
          x = p[k].ctr[i]; p[k].ctr[i] = p[n].ctr[i];
          p[n].ctr[i] = x;      /* exchange the i-th elements of the */
        }                       /* k-th and the n-th cluster center */
      } break;                  /* (shuffle dimensions independently) */

    case CLS_POINTS:            /* -- given points in the data space */
      if (vec) nst_norm(nst, vec, clset->vec);
      vec = clset->vec;         /* scale given vector to the buffer */
      if (clset->init >= clset->clscnt)
        clset->init = 0;        /* if all clusters are init., restart */
      p = clset->cls +clset->init++;
      vec_copy(p->ctr, vec, clset->incnt);
      break;                    /* copy the given vector */

    case CLS_UNIFORM:           /* -- uniformly distributed */
    default:                    /* (this is also the default) */
      nst_spans(nst, b = clset->vec);  /* get the value spans */
      for (p = clset->cls +(k = clset->clscnt); --k >= 0; )
        for (c = (--p)->ctr, i = clset->incnt; --i >= 0; )
          c[i] = nst_min(nst, i) +b[i] *randfn();
      break;                    /* set cluster centers to random */
  }                             /* points in the data space */

  /* --- scale the coordinates --- */
  if ((mode & 0xf) != CLS_POINTS)
    for (p = clset->cls +(k = clset->clscnt); --k >= 0; ) {
      --p; nst_norm(nst, p->ctr, p->ctr); }

  if (((mode & 0xf) != CLS_POINTS)
  ||  (clset->init >= clset->clscnt)) {
    /* --- add a random offset --- */
    if (range > 0) {            /* if a range for offsets is given */
      for (p = clset->cls +(k = clset->clscnt); --k >= 0; )
        for (c = (--p)->ctr +(i = clset->incnt); --i >= 0; )
          *--c += (2 *randfn() -1) *range;
    }                           /* add a random offset to all values */
    /* --- normalize the centers --- */
    if (mode & CLS_UNIT)        /* if centers on the unit sphere */
      _normctr(clset, 0);       /* normalize the cluster centers */
  }
}  /* cls_init() */
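/*--------------------------------------------------------------------*/
/* A minimal, self-contained sketch of the Latin hypercube idea used  */
/* in the CLS_LATIN branch above: place k equally spaced values per   */
/* dimension and then shuffle each dimension independently, so every  */
/* cluster center gets exactly one "slot" per dimension. The function */
/* name lhs_sample, the flat ctrs[n*dim+i] layout, and the use of     */
/* rand() instead of this library's randfn() are illustrative         */
/* assumptions, not part of the library interface.                    */

#include <stdlib.h>             /* for rand() (sketch only) */

static void lhs_sample (double *ctrs, int k, int dim,
                        const double *mins, const double *maxs)
{                               /* --- toy latin hypercube sampling */
  int    i, n, r;               /* dimension index, cluster indices */
  double d, t;                  /* slot width, exchange buffer */

  for (i = 0; i < dim; i++) {   /* traverse the dimensions */
    d = (maxs[i] -mins[i]) /k;  /* width of one slot */
    for (n = 0; n < k; n++)     /* one slot center per cluster, */
      ctrs[n*dim +i] = mins[i] +(n +0.5) *d;   /* equally spaced */
    for (n = k; --n > 0; ) {    /* Fisher-Yates shuffle, done */
      r = rand() % (n+1);       /* independently per dimension */
      t = ctrs[r*dim +i];       /* (exactly as in the CLS_LATIN */
      ctrs[r*dim +i] = ctrs[n*dim +i];         /* branch above) */
      ctrs[n*dim +i] = t;
    }
  }
}  /* lhs_sample() */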
/*--------------------------------------------------------------------*/

void cls_method (CLSET *clset, int method)
{                               /* --- set parameter update method */
  int     i;                    /* loop variable */
  CLUSTER *p;                   /* to traverse the clusters */
  double  t;                    /* initialization value */

  assert(clset                  /* check the function arguments */
  &&    ((method & CLS_METHOD)   >= CLS_GRADIENT)
  &&    ((method & CLS_METHOD)   <= CLS_BACKPROP)
  &&    ((method & CLS_MODIFIER) >= CLS_NONE)
  &&    ((method & CLS_MODIFIER) <= CLS_QUICK));
  clset->method = method;       /* note the parameter update method */
  method &= CLS_MODIFIER;       /* get the update modifier */
  if (method > CLS_EXPAND) {    /* if one of the higher methods */
    t = (method == CLS_ADAPTIVE) ? 1 : 0;
    for (p = clset->cls +(i = clset->clscnt); --i >= 0; ) {
      --p; mat_init(p->chv, MAT_VALUE, &t);
      mat_init(p->bfv, MAT_ZERO, NULL);
    }                           /* initialize the change matrix */
  }                             /* and the buffer matrix */
}  /* cls_method() */

/*--------------------------------------------------------------------*/

void cls_regular (CLSET *clset, const double *params)
{                               /* --- set regularization parameters */
  int i;                        /* loop variable */

  assert(clset && params);      /* check the function arguments */
  for (i = 5; --i >= 0; )       /* copy the parameters */
    clset->regps[i] = params[i];
}  /* cls_regular() */

/*--------------------------------------------------------------------*/

void cls_lrate (CLSET *clset, const double *lrates,
                const double *decays)
{                               /* --- set learning rate parameters */
  int i;                        /* loop variable */

  assert(clset);                /* check the function arguments */
  if (lrates) {                 /* if learning rates are given, */
    for (i = 3; --i >= 0; )     /* copy them to the cluster set */
      clset->lrates[i] = lrates[i]; }
  if (decays) {                 /* if decay parameters are given, */
    for (i = 3; --i >= 0; )     /* copy them to the cluster set */
      clset->decays[i] = decays[i]; }
}  /* cls_lrate() */

/*--------------------------------------------------------------------*/

int cls_aggr (CLSET *clset, const double *vec, double weight)
{                               /* --- aggregate a data vector */
  int      i, n;                /* cluster index, loop variable */
  CLUSTER  *c;                  /* to traverse the clusters */
  MATADDFN *add;                /* aggregation function */
  double   msd;                 /* membership degree */
  double   exp;                 /* adaptation exponent */

  assert(clset);                /* check the function arguments */
  i   = cls_exec(clset, vec, NULL);  /* compute degrees of membership */
  vec = clset->vec;             /* get the buffered vector */
  exp = fabs(clset->msexp);     /* and the adaptation exponent */
  if      (clset->type & CLS_COVARS)          add = mat_addmp;
  else if (clset->type & (CLS_VARS|CLS_SIZE)) add = mat_addsv;
  else                                        add = mat_addvec;

  /* --- alternating estimation --- */
  /* Aggregate the data vectors for the estimation step. */
  if ((clset->method & CLS_METHOD) == CLS_ALTOPT) {
    if (exp <= 0) {             /* if hard/crisp clustering */
      c = clset->cls +i;        /* get the cluster to assign to */
      add(c->smp, vec, weight); }    /* sum the weighted vector */
    else {                      /* if fuzzy/prob. clustering */
      for (c = clset->cls +(n = clset->clscnt); --n >= 0; ) {
        msd = (--c)->msd;       /* traverse the clusters */
        if (msd <= 0) continue; /* skip cluster with zero membership */
        if      (exp == 2) msd *= msd;
        else if (exp != 1) msd  = pow(msd, exp);
        msd *= weight;          /* compute the data point weight */
        add(c->smp, vec, msd);  /* sum the data vector weighted */
      }                         /* with the degree of membership */
    }
  }

  /* --- competitive learning --- */
  /* Aggregate the difference vectors for the update step. */
  else if ((clset->method & CLS_METHOD) == CLS_COMPLRN) {
    if (exp <= 0) {             /* if hard/crisp clustering */
      c = clset->cls +i;        /* get the cluster to assign to */
      add(c->smp, c->dif, weight); } /* sum the weighted vector */
    else {                      /* if fuzzy/prob. clustering */
      for (c = clset->cls +(n = clset->clscnt); --n >= 0; ) {
        msd = (--c)->msd;       /* traverse the clusters */
        if (msd <= 0) continue; /* skip cluster with zero membership */
        if      (exp == 2) msd *= msd;
        else if (exp != 1) msd  = pow(msd, exp);
        msd *= weight;          /* compute the data point weight */
        add(c->smp, c->dif, msd); }  /* sum the difference vector */
    }                           /* weighted with the membership degree */
  }

  /* --- gradient based update --- */
  else {                        /* if (method == CLS_GRADIENT) */
    /* ... to be done ... */
  }
  return i;                     /* return index of best cluster */
}  /* cls_aggr() */
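/*--------------------------------------------------------------------*/
/* A minimal sketch of the membership weighting used in cls_aggr()    */
/* above: with adaptation exponent exp <= 0 a data point is assigned  */
/* crisply to the best cluster only, otherwise every cluster receives */
/* the point with weight u^exp (u = degree of membership), scaled by  */
/* the data point weight. The function name fuzzy_weight is an        */
/* illustrative assumption, not part of this library.                 */

#include <math.h>               /* for pow() (sketch only) */

static double fuzzy_weight (double msd, double exp, double weight)
{                               /* --- weight of a point for a cluster */
  if (msd <= 0) return 0;       /* zero membership contributes nothing */
  if      (exp == 2) msd *= msd;          /* common special case */
  else if (exp != 1) msd  = pow(msd, exp);/* general fuzzifier */
  return msd *weight;           /* scale by the data point weight */
}  /* fuzzy_weight() */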
/*--------------------------------------------------------------------*/

void cls_bkprop (CLSET *clset, const double *errs)
{                               /* --- backpropagate errors */
  int     i, n;                 /* loop variables, cluster index */
  int     type;                 /* cluster type flags */
  CLUSTER *p;                   /* to traverse the clusters */
  double  *c, *d, t;            /* to access the vectors, buffers */

  assert(clset && errs);        /* check the function arguments */
  type = clset->type;           /* get the cluster type flags */
  for (p = clset->cls +(n = clset->clscnt); --n >= 0; ) {
    d = (--p)->dif;             /* traverse the clusters */
    t = -errs[n] *clset->drvfn(p->d2, clset->rfnps, p->msd);
    if (type & CLS_COVARS) {    /* -- if adaptable covariances */
      mat_mulmv(clset->buf, p->inv, d);
      mat_addmpx(p->smp, d, t); /* compute derivative terms */
      d = clset->buf; }         /* and get the buffered result */
    else if (type & CLS_VARS) { /* -- if adaptable variances */
      mat_muldv(clset->buf, p->inv, d);
      mat_addsvx(p->smp, d, t); /* compute derivative terms */
      d = clset->buf; }         /* and get the buffered result */
    else if (type & CLS_SIZE) { /* -- if adaptable isotropic var. */
      t = t /p->var;            /* include the variance */
      mat_inc(p->smp, 0, 0, -t *p->d2); } /* sum the var. gradients */
    t *= -2;                    /* compute center update factor */
    c = p->sum +(i = clset->incnt);
    for (d += i; --i >= 0; ) *--c += *--d *t;
  }                             /* sum the center gradients */
}  /* cls_bkprop() */

/*--------------------------------------------------------------------*/

double cls_update (CLSET *clset, int conly)
{                               /* --- update a set of clusters */
  int     n, type;              /* loop variable, type buffer */
  CLUSTER *p;                   /* to traverse the clusters */
  double  sum = 0;              /* sum of cluster weights */

  assert(clset);                /* check the function argument */
  type = clset->type;           /* note and replace the cluster type */
  if (conly) clset->type = CLS_CENTER;

  /* --- determine the update weights --- */
  if ((clset->method & CLS_METHOD) != CLS_BACKPROP) {
    for (p = clset->cls +(n = clset->clscnt); --n >= 0; ) {
      --p; p->nw = mat_weight(p->smp);
      if (p->nw >= MINWEIGHT) { p->d2 = 0; }
      else { p->nw = MINWEIGHT; p->d2 = -1; }
      sum += p->nw;             /* get and adapt the cluster weights */
    }                           /* and sum them for a normalization */
    if (sum <= 0) return 0;     /* check for a proper update */
    clset->msd[1] = 1.0/sum;    /* note the normalization factor */
  }

  /* --- compute new parameters --- */
  switch (clset->method & CLS_METHOD) {
    case CLS_ALTOPT  : _altopt  (clset); break;
    case CLS_COMPLRN : _complrn (clset); break;
    case CLS_BACKPROP: _backprop(clset); break;
    default          : _gradient(clset); break;
  }                             /* call the approp. update function */

  /* --- regularize the parameters --- */
  _regshape (clset);            /* regularize cluster shapes, */
  _regsize  (clset);            /* cluster sizes, and */
  _regweight(clset);            /* cluster weights */

  /* --- update the parameters --- */
  sum = ((clset->method & CLS_MODIFIER) == CLS_NONE)
      ? _stdupd(clset)          /* update the cluster parameters */
      : _nnupd (clset);         /* with the given update modifier */
  if (conly) clset->type = type;     /* restore the cluster type */
  if (clset->method & CLS_ORIGIN) {  /* if cluster centers at origin, */
    _zeroctr(clset, 1); return 1; }  /* zero the center vectors */
  if (clset->method & CLS_UNIT)      /* if centers on unit sphere, */
    _normctr(clset, 0);              /* (re)normalize the vectors */
  return sum;                   /* return the maximal change */
}  /* cls_update() */
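/*--------------------------------------------------------------------*/
/* A usage sketch under stated assumptions: a cluster set that has    */
/* already been created, configured, and initialized is trained by    */
/* repeatedly aggregating all data vectors with cls_aggr() and then   */
/* recomputing the parameters with cls_update(), until the maximal    */
/* change returned by cls_update() falls below a threshold (0 means   */
/* no proper update was possible). The function name cls_train, the  */
/* data layout, the unit point weights, and the stopping parameters   */
/* are illustrative choices, not part of the interface shown above.   */

static void cls_train (CLSET *clset, const double **data,
                       int n, double eps, int maxepochs)
{                               /* --- train a set of clusters */
  int    i, t;                  /* loop variables */
  double chg;                   /* maximal parameter change */

  for (t = 0; t < maxepochs; t++) {
    for (i = 0; i < n; i++)     /* aggregate all data vectors */
      cls_aggr(clset, data[i], 1.0); /* with unit weight */
    chg = cls_update(clset, 0); /* recompute all cluster parameters */
    if (chg < eps) break;       /* stop when the change is small */
  }                             /* (or no proper update was possible) */
}  /* cls_train() */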