
📄 lvq.c

📁 LVQ - Learning Vector Quantization Demonstration. Downloads: xlvq Linux executable (128 kb), wlvq.ex
💻 C
📖 Page 1 of 3
/*----------------------------------------------------------------------
  File    : lvq.c
  Contents: learning vector quantization with some extensions
  Author  : Christian Borgelt
  History : 17.02.2003 file created from file cluster.c
            09.03.2003 first compilable version completed
            11.03.2003 weight parameter added to function lvq_reg
            15.03.2003 organization of quantization prototypes changed
            15.05.2003 activation normalization modes added
            06.08.2003 bug in mapping binary attributes fixed
            11.08.2003 adapted to new module attmap
            12.08.2003 adapted to new module nstats
            15.08.2003 adapted to new module radfn
----------------------------------------------------------------------*/
#include <stdlib.h>
#include <limits.h>
#include <float.h>
#include <math.h>
#include <string.h>
#include <assert.h>
#include "lvq.h"

/*----------------------------------------------------------------------
  Preprocessor Definitions
----------------------------------------------------------------------*/
#define BLKSIZE      16         /* block size for prototype vector */
#define MINVAR       1e-12      /* minimal variance */

/*----------------------------------------------------------------------
  Functions
----------------------------------------------------------------------*/

static void _init (LVQNET *lvq, int dim, int cnt)
{                               /* --- initialize some variables */
  lvq->dim       = dim;
  lvq->cnt       = cnt;
  lvq->init      = 0;
  lvq->reg       = lvq->spw = 0;
  lvq->type      = LVQ_CENTER;  /* prototypes consist of centers only */
  lvq->actfn     = rf_cauchy;   /* default activation function: */
  lvq->params[0] = 2;           /* act(x) = 1/d^2(x,c) */
  lvq->params[1] = 0;           /* (inverse sq. Euclidean distance) */
  lvq->mode      = LVQ_HARD;
  lvq->wtarf     = lvq->exp   = 0;
  lvq->lrcur     = lvq->lrate = 0.01;
  lvq->decay     = -0.01;
  lvq->szscl     = 1;
  lvq->steps     = 1;
  lvq->qps       = NULL;        /* initialize pointers, */
  lvq->vec       = NULL;        /* so that deletion */
  lvq->nst       = NULL;        /* with lvq_delete works */
}  /* _init() */

/*--------------------------------------------------------------------*/

static int _qprot (QPROT *p, int dim)
{                               /* --- init. a quant. prototype */
  double *c;                    /* to traverse the change vector */

  p->ctr = (double*)malloc(3 *dim *sizeof(double));
  if (!p->ctr) return -1;       /* create and */
  p->dif = p->ctr +dim;         /* organize the vectors, */
  p->chg = p->dif +dim;         /* and initialize the fields */
  p->var = 1;
  p->sqr = p->sum = p->spw = 0;
  for (c = p->chg +dim; --dim >= 0; )
    *--c = 0;                   /* clear the change vector */
  return 0;                     /* return 'ok' */
}  /* _qprot() */

/*--------------------------------------------------------------------*/

LVQNET* lvq_create (int dim, int cnt)
{                               /* --- create a LVQ network */
  int    i;                     /* loop variable */
  LVQNET *lvq;                  /* created neural network */
  QPROT  *p;                    /* to traverse the prototypes */

  assert((dim > 0) && (cnt > 0));  /* check the function arguments */
  lvq = (LVQNET*)malloc(sizeof(LVQNET));
  if (!lvq) return NULL;        /* create the base structure */
  _init(lvq, dim, cnt);         /* and initialize the variables */
  lvq->qps = p = (QPROT*)malloc(cnt *sizeof(QPROT));
  if (!lvq->qps) { lvq_delete(lvq); return NULL; }
  for (p += i = lvq->cnt; --i >= 0; )
    (--p)->ctr = NULL;          /* clear pointers for cleanup */
  for (p += i = lvq->cnt; --i >= 0; )
    if (_qprot(--p, dim) != 0) { /* init. the quantization prototypes */
      lvq_delete(lvq); return NULL; }
  lvq->vec = (double*)malloc(2 *dim *sizeof(double));
  if (!lvq->vec) { lvq_delete(lvq); return NULL; }
  lvq->buf = lvq->vec +dim;     /* create the numeric vectors */
  lvq->nst = nst_create(dim);   /* and the numerical statistics */
  if (!lvq->nst) { lvq_delete(lvq); return NULL; }
  #ifdef LVQ_EXTFN              /* if extended function set, */
  lvq->attset = NULL;           /* clear the attribute set and */
  lvq->attmap = NULL;           /* attribute map pointer as */
  #endif                        /* an indicator for normal LVQ */
  return lvq;                   /* return the created neural network */
}  /* lvq_create() */

/*--------------------------------------------------------------------*/
#ifdef LVQ_EXTFN

LVQNET* lvq_createx (ATTSET *attset, int marked, int cnt)
{                               /* --- create a LVQ network */
  LVQNET *lvq;                  /* created LVQ network */
  ATTMAP *attmap;               /* created attribute map */

  assert(attset && (cnt > 0));  /* check the function arguments */
  attmap = am_create(attset, marked, -1);
  if (!attmap) return NULL;     /* create an attribute map */
  lvq = lvq_create(am_dim(attmap), cnt);
  if (!lvq) { am_delete(attmap); return NULL; }
  lvq->attset = attset;         /* create a neural network and */
  lvq->attmap = attmap;         /* note the attribute set and map */
  return lvq;                   /* return the created neural network */
}  /* lvq_createx() */

#endif
/*--------------------------------------------------------------------*/

void lvq_delete (LVQNET *lvq)
{                               /* --- delete a LVQ network */
  int   i;                      /* loop variable */
  QPROT *p;                     /* to traverse the prototypes */

  assert(lvq);                  /* check the function argument */
  #ifdef LVQ_EXTFN              /* if to compile extended functions, */
  if (lvq->attmap) free(lvq->attmap);    /* delete the attribute map */
  #endif
  if (lvq->nst) nst_delete(lvq->nst);
  if (lvq->vec) free(lvq->vec); /* delete the numeric vectors */
  if (lvq->qps) {               /* if there is a prototypes vector */
    for (p = lvq->qps +(i = lvq->cnt); --i >= 0; )
      if ((--p)->ctr) free(p->ctr);
    free(lvq->qps);             /* delete all centers and */
  }                             /* the prototype vector */
  free(lvq);                    /* delete the base structure */
}  /* lvq_delete() */

/*--------------------------------------------------------------------*/

void lvq_value (LVQNET *lvq, int index, double value)
{                               /* --- set an input value */
  lvq->vec[index] = (value -nst_offset(lvq->nst, index))
                           *nst_factor(lvq->nst, index);
}  /* lvq_value() */

/*--------------------------------------------------------------------*/
#ifdef LVQ_EXTFN

void lvq_valuex (LVQNET *lvq, const TUPLE *tpl)
{                               /* --- register a training tuple */
  assert(lvq);                  /* check the function arguments */
  am_exec(lvq->attmap, tpl, AM_INPUTS, lvq->vec);
  nst_norm(lvq->nst, lvq->vec, lvq->vec);
}  /* lvq_valuex() */           /* normalize the input values */

#endif
/*--------------------------------------------------------------------*/

void lvq_reg (LVQNET *lvq, double *vec, double weight)
{                               /* --- register a data point */
  #ifdef LVQ_EXTFN              /* if to compile extended functions */
  int i, k, off;                /* loop variables, offset */
  #endif

  assert(lvq);                  /* check the function argument */
  nst_reg(lvq->nst, vec, weight);
  #ifdef LVQ_EXTFN              /* if to compile extended functions */
  if (vec || !lvq->attmap)      /* if not termination or normal LVQ, */
    return;                     /* abort the function */
  for (i = am_attcnt(lvq->attmap); --i >= 0; ) {
    if (am_type(lvq->attmap, i) < 0) continue;
    off = am_off(lvq->attmap, i);
    k   = am_cnt(lvq->attmap, i) +off;
    while (--k >= off) nst_scale(lvq->nst, k, 0, 1);
  }                             /* set identity for symbolic attribs. */
  #endif                        /* (prevent distortion by scaling) */
}  /* lvq_reg() */

/*--------------------------------------------------------------------*/
#ifdef LVQ_EXTFN

void lvq_regx (LVQNET *lvq, const TUPLE *tpl)
{                               /* --- register a training tuple */
  assert(lvq);                  /* check the function arguments */
  if (!tpl) {                   /* if to terminate registration */
    lvq_reg(lvq, NULL, 0); return; }
  am_exec(lvq->attmap, tpl, AM_INPUTS, lvq->vec);
  lvq_reg(lvq, lvq->vec, tpl_getwgt(tpl));
}  /* lvq_regx() */             /* set the data values and register */

#endif
/*--------------------------------------------------------------------*/

void lvq_type (LVQNET *lvq, int type, double radius)
{                               /* --- set the cluster type */
  int   i;                      /* loop variables */
  QPROT *p;                     /* to traverse the prototypes */

  assert(lvq);                  /* check the function arguments */
  lvq->type = type;             /* note the prototype type */
  if (radius <= 0) radius = 1;  /* check and adapt the initial radius */
  radius *= radius;             /* and compute the variance */
  for (p = lvq->qps +(i = lvq->cnt); --i >= 0; )
    (--p)->var = radius;        /* note the initial variance */
}  /* lvq_type() */

/*--------------------------------------------------------------------*/

void lvq_init (LVQNET *lvq, int mode, double range,
               double randfn(void), double *vec)
{                               /* --- initialize a LVQ network */
  int    i, k, n;               /* loop variables */
  QPROT  *p;                    /* to traverse the prototypes */
  NSTATS *nst;                  /* simple numerical statistics */
  double *c, *b;                /* center vector, buffer */
  double x, d, m;               /* coordinate buffers */

  assert(lvq && randfn);        /* check the function arguments */

  /* --- initialize other parameters --- */
  if ((mode != LVQ_POINTS)      /* if not called a second time */
  ||  (lvq->init <= 0) || (lvq->init >= lvq->cnt)) {
    lvq->steps = 1;             /* init. the update step counter */
    for (p = lvq->qps +(k = lvq->cnt); --k >= 0; ) {
      (--p)->var = 1;           /* traverse the prototypes and */
      p->sqr = p->sum = p->spw = 0;    /* init. the parameters */
      for (c = p->chg +(i = lvq->dim); --i >= 0; )
        *--c = 0;               /* traverse the coordinates changes */
    }                           /* and clear them */
  }

  /* --- initialize reference vectors --- */
  nst = lvq->nst;               /* get the numerical statistics */
  switch (mode) {               /* evaluate the initialization mode */
    case LVQ_CENTER:            /* -- center of the data space */
      c = lvq->qps->ctr;        /* compute the center */
      nst_center(nst, c);       /* of the data space */
      for (p = lvq->qps +(k = lvq->cnt); --k > 0; )
        vec_copy((--p)->ctr, c, lvq->dim);
      break;                    /* copy the center to all prototypes */
    case LVQ_DIAG:              /* -- diagonal of the data space */
    case LVQ_LATIN:             /* -- latin hypercube sampling */
      for (i = lvq->dim; --i >= 0; ) {
        m = nst_max(nst, i);    /* compute value decrement */
        d = (m -nst_min(nst, i)) / lvq->cnt;
        x = m -0.5*d;           /* compute last value */
        for (p = lvq->qps +(k = lvq->cnt); --k >= 0; ) {
          (--p)->ctr[i] = x; x -= d; }
      }                         /* set equally spaced values */
      if (mode == LVQ_DIAG)     /* if only to set the diagonal, */
        break;                  /* there is nothing else to be done */
    /* case LVQ_LATIN: */       /* -- latin hypercube sampling */
      p = lvq->qps;             /* shuffle elements of the vectors */
      for (n = lvq->cnt; --n > 0; ) {
        for (i = lvq->dim; --i >= 0; ) {
          k = (int)((n+1) *randfn());
          if      (k > n) k = n;   /* compute a random index in */
          else if (k < 0) k = 0;   /* the remaining set of vectors */
          x           = p[k].ctr[i];
          p[k].ctr[i] = p[n].ctr[i];
