⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 lvq.c

📁 LVQ - Learning Vector Quantization Demonstration Download xlvq Linux executable (128 kb) wlvq.exe
💻 C
📖 第 1 页 / 共 3 页
字号:
          p[n].ctr[i] = x;      /* exchange the i-th elements of */	}                       /* the k-th and the n-th center */      } break;                  /* (shuffle dimensions independently) */    case LVQ_POINTS:            /* -- given points in the data space */      if (!vec) vec = lvq->vec;      if (lvq->init >= lvq->cnt)        lvq->init = 0;          /* if all protos. are init., restart */      p = lvq->qps +lvq->init++;      nst_norm(nst, p->ctr, vec);      if (range <= 0) return;   /* copy and scale a given data point */      for (c = p->ctr, i = lvq->dim; --i >= 0; )        c[i] += (2 *randfn() -1) *range;      return;                   /* and add a random offset */    case LVQ_UNIFORM:           /* -- uniformly distributed */    default:                    /* (this is also the default) */      nst_spans(nst, b = lvq->buf);    /* get the value spans */      for (p = lvq->qps +(k = lvq->cnt); --k >= 0; )        for (c = (--p)->ctr, i = lvq->dim; --i >= 0; )          c[i] =  nst_min(nst, i) +b[i] *randfn();      break;                    /* set reference vectors to */  }                             /* random points in the data space */  /* --- scale the coordinates --- */  for (p = lvq->qps +(k = lvq->cnt); --k >= 0; ) {    c = (--p)->ctr; nst_norm(nst, c, c); }  /* --- add a random offset --- */  if (range > 0) {              /* if a range for offsets is given */    for (p = lvq->qps +(k = lvq->cnt); --k >= 0; )      for (c = (--p)->ctr, i = lvq->dim; --i >= 0; )        c[i] += (2 *randfn() -1) *range;  }                             /* add a random offset to all values */}  /* lvq_init() *//*--------------------------------------------------------------------*/void lvq_actfn (LVQNET *lvq, RADFN actfn, double *params){                               /* --- set the activation function */  lvq->actfn     = actfn;       /* note the activation function */  lvq->params[0] = params[0];   /* and its parameters */  lvq->params[1] = params[1];}  /* lvq_actfn() 
*//*--------------------------------------------------------------------*/int lvq_exec (LVQNET *lvq, double *vec, double *acts){                               /* --- compute ouputs / activations */  int    i, k, best;            /* loop variables, prototype index */  QPROT  *p;                    /* to traverse the prototypes */  double *c, *b;                /* to access the center and buffer */  double d2;                    /* squared distance of pattern */  double sum, max;              /* sum/maximum of neuron activations */  int    mode;                  /* activation normalization mode */  assert(lvq);                  /* check the function arguments */  if (vec)                      /* if a data vector is given, */    nst_norm(lvq->nst, vec, lvq->vec); /* scale it to the buffer */  vec = lvq->vec;               /* and then work with the buffer */  /* --- compute absolute neuron activations --- */  sum = best = 0; max = -DBL_MAX;  /* initialize the variables */  for (p = lvq->qps +(k = lvq->cnt); --k >= 0; ) {    c = (--p)->ctr;             /* traverse the prototypes */    for (b = p->dif, i = lvq->dim; --i >= 0; )      b[i] = vec[i] -c[i];      /* compute diff. 
vector to center */    p->d2 = d2 = vec_sqrlen(b, lvq->dim);    assert(d2 >= 0);            /* compute distance to center */    if (lvq->type & LVQ_SIZE)   /* if to use prototype sizes, */      d2 /= p->var;             /* divide by the variance */    p->act = lvq->actfn(d2, lvq->params);    assert(p->act >= 0);        /* compute the neuron output and */    sum += p->act;              /* sum it for the normalization */    if (p->act >= max) { max = p->act; best = k; }  }                             /* determine the winner neuron */  /* --- compute relative neuron activations --- */  mode = lvq->mode;             /* get the normalization mode */  if (lvq->wtarf > 0) {         /* if limited winner takes all */    d2 = lvq->wtarf;            /* square the winner takes all radius */    d2 *= d2;                   /* factor (distances are all squared) */    if (lvq->type & LVQ_SIZE)   /* compute the squared adapted */      d2 *= p[best].var;        /* radius for the winner neuron */    if (p[best].d2 < d2)        /* if the data point is inside */      mode = LVQ_HARD;          /* the adapted radius, activate */  }                             /* according to winner takes all */  if      (mode == LVQ_HARD) {  /* if the winner takes all */    for (p += k = lvq->cnt; --k >= 0; )      (--p)->act = 0;           /* set only the activation of */    p[best].act = 1; }          /* the best neuron, clear all other */  else if (mode == LVQ_MAX1) {  /* if to normalize to maximum 1 */    max = (max > 0) ? 1/max :1; /* get the normalization factor */    for (p += k = lvq->cnt; --k >= 0; )      (--p)->act *= max; }      /* compute rel. neuron activations */  else if (mode == LVQ_SUM1) {  /* if to normalize to sum 1 */    sum = (sum > 0) ? 1/sum :1; /* get the normalization factor */    for (p += k = lvq->cnt; --k >= 0; )      (--p)->act *= sum;        /* compute rel. 
neuron activations */  }                             /* by dividing them by their sum */  /* --- set result --- */  if (acts) {                   /* if an activation vector is given */    for (p += k = lvq->cnt; --k >= 0; )      acts[k] = (--p)->act;     /* copy the neuron activations */  }                             /* to the given vector */  return best;                  /* return index of winner neuron */}  /* lvq_exec() *//*--------------------------------------------------------------------*/int lvq_aggr (LVQNET *lvq, double *vec, double weight){                               /* --- aggregate a data vector */  int    i, k;                  /* prototype index, loop variable */  QPROT  *p;                    /* to traverse the prototypes */  double *c, *d;                /* center vector and its change */  double act, exp;              /* neuron activation and exponent */  assert(lvq);                  /* check the function arguments */  i = lvq_exec(lvq, vec, NULL); /* compute degrees of membership */  lvq->spw += weight;           /* sum weight of processed pattern */  exp = fabs(lvq->exp);         /* get the adaptation exponent */  if (exp == 0) exp = 1;        /* no special "winner takes all" */  for (p = lvq->qps +(k = lvq->cnt); --k >= 0; ) {    act = (--p)->act;           /* traverse the neurons */    if (act <= 0) continue;     /* skip unactivated neurons */    if      (exp == 2) act *= act;    else if (exp != 1) act = pow(act, exp);    p->spw += act *= weight;    /* count the training pattern */    c = p->chg; d = p->dif;     /* get change and difference vector */    for (i = lvq->dim; --i >= 0; )      c[i] += act *d[i];        /* agg. the weighted diff. 
vectors */    if (!(lvq->type & LVQ_SIZE))/* if there is no size parameter, */      continue;                 /* skip the size aggregations */    p->sqr += act *p->d2;       /* sum the squared distances */    if ((lvq->method & ~LVQ_REPLACE) == LVQ_DIST)      p->sum += act *sqrt(p->d2);  /* if size based on average dist. */  }                                /* sum the distances to the center */  return i;                     /* return index of the winner neuron */}  /* lvq_aggr() *//*--------------------------------------------------------------------*/double lvq_update (LVQNET *lvq){                               /* --- update a LVQ network */  int    i, k;                  /* loop variables */  QPROT  *p;                    /* to traverse the prototypes */  double *c, *d;                /* to traverse center and change */  double max = 0, t;            /* maximum change of a vector */  double lrcur;                 /* current learning rate */  double var = 0;               /* adapted uniform variance */  assert(lvq);                  /* check the function argument */  for (p = lvq->qps +(k = lvq->cnt); --k >= 0; ) {    if ((--p)->spw <= 0) {      /* traverse non-empty prototypes */      p->buf = p->var; continue; }    t = 1 /p->spw;              /* compute the learning rate */    lrcur = (lvq->lrcur < t) ? lvq->lrcur : t;    c = p->ctr; d = p->chg;     /* update the reference vectors */    for (i = lvq->dim; --i >= 0; ) {      c[i] += t = lrcur *d[i];  /* update the center coordinates */      t = fabs(t);              /* determine the maximal change */      if (t > max) max = t;     /* of a center coordinate and */      d[i] = 0;                 /* reinitialize the change */    }    if (lvq->type & LVQ_SIZE) { /* if to use prototype sizes */      i = lvq->method & ~LVQ_REPLACE;      if      (i == LVQ_WEIGHT) {        t = lvq->spw /(p->spw *lvq->dim);        p->buf = t *p->var; }   /* compute radius from rel. 
weight */      else if (i == LVQ_VAR) {        t = p->sqr   /(p->spw *lvq->dim);        p->buf = t; }           /* compute isotropic variance */      else if (i == LVQ_SQUARE) {        t = p->sqr /p->spw;     /* compute average squared distance */        p->buf = t; }      else {                    /* if (i == LVQ_DIST) */        t = p->sum /p->spw;     /* compute average distance */        p->buf = t *t;          /* and square it */      }      p->sqr = p->sum = 0;      /* reinit. the sum of (squared) */    }                           /* distances to the center */  }  if (lvq->type & LVQ_SIZE) {   /* if to use prototype sizes */    if (lvq->szscl < 0) {       /* if to unify the prototype size */      for (var = 0, p += k = lvq->cnt; --k >= 0; )        var += sqrt((--p)->buf);/* sum the prototype sizes */      var *= -lvq->szscl /lvq->cnt;      var *= var;               /* compute the average size */    }                           /* and the uniform variance */    for (p += i = lvq->cnt; --i >= 0; ) {      --p;                      /* traverse the prototypes */      if      (lvq->szscl == 0) var = p->var;      else if (lvq->szscl >  0) var = p->buf *lvq->szscl;      if (!(lvq->method & LVQ_REPLACE)) {        lrcur = lvq->lrcur *p->spw;        if (lrcur > 1) lrcur = 1;        var = p->var +lrcur *(var -p->var);      }                         /* compute the new variance */      p->var = (var < MINVAR) ? MINVAR : var;    }                           /* set or adapt the variance */  }                             /* of all prototypes */  for (p += i = lvq->cnt; --i >= 0; )    (--p)->spw = 0;             /* reinit. 
sum of pattern weights */  lvq->spw = 0;                 /* and the processed pattern weight */  lvq->steps++;                 /* increment the update step counter */  if       (lvq->decay < 0)     /* compute the next learning rate */    lvq->lrcur = lvq->lrate *pow(lvq->steps, lvq->decay);  else if ((lvq->decay > 0) && (lvq->decay < 1))    lvq->lrcur = lvq->lrate *pow(lvq->decay, lvq->steps);  return max;                   /* return the maximal change */}  /* lvq_update() *//*--------------------------------------------------------------------*/int lvq_desc (LVQNET *lvq, FILE *file, int mode, int maxlen){                               /* --- describe a d.s. network */  int   i, k;                   /* loop variables */  QPROT *p;                     /* to traverse the prototypes */  char  *indent = "";           /* indentation string */  assert(lvq && file);          /* check the function arguments */  /* --- print a header (as a comment) --- */  if (mode & LVQ_TITLE) {       /* if the title flag is set */    i = k = (maxlen > 0) ? maxlen -2 : 70;    fputs("/*", file); while (--i >= 0) fputc('-', file);    fputs("\n  vector quantization\n", file);    while (--k >= 0) fputc('-', file); fputs("*/\n", file);  }                             /* print a title header */  if (maxlen <= 0) maxlen = INT_MAX;  #ifdef LVQ_EXTFN              /* if to compile extended functions */  if (lvq->attmap) {            /* if based on an attribute set, */    fputs("lvqnet = {\n",file); /* start the neural network desc. */    indent = "  ";              /* indent the description itself */  }                             /* by two characters */  #endif  /* --- print scaling parameters --- */  nst_desc(lvq->nst, file, indent, maxlen);  /* --- print the function description --- */  fputs(indent,        file);   /* write the indentation */  fputs("function = ", file);   /* and the function name */  fputs((lvq->actfn == rf_gauss) ? 
"gauss" : "cauchy", file);  fprintf(file,"(%g,%g);\n",    /* write the function parameters */          lvq->params[0], lvq->params[1]);  /* --- print the normalization mode --- */  fputs(indent,        file);   /* write the indentation */  fputs("normmode = ", file);   /* and the norm. mode indicator */  switch (lvq->mode) {          /* evaluate the normalization mode */    case LVQ_SUM1: fputs("sum1", file); break;    case LVQ_MAX1: fputs("max1", file); break;    case LVQ_HARD: fputs("hard", file); break;    default:       fputs("none", file); break;  }                             /* print the norm. mode name */  if (lvq->wtarf > 0)           /* winner takes all radius factor */    fprintf(file, ", %g", lvq->wtarf);  fputs(";\n", file);           /* terminate the normalization mode */  /* --- print network parameters --- */  fputs(indent,       file);    /* write the indentation and */  fputs("params = {", file);    /* start the parameter section */  p = lvq->qps;                 /* traverse the prototypes */  for (i = 0; i < lvq->cnt; p++, i++) {

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -