⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 art2.cpp

📁 adaptive resonance theory 2 人工神经网络
💻 CPP
📖 第 1 页 / 共 2 页
字号:
#include <stdio.h>
#include <conio.h>
#include <math.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <ctype.h>

/* Convenience accessors into an art2 network: expect a local variable
 * `n` of type art2 * in scope at the point of use. */
#define LAYER n->net
#define I n->net[0]   /* input layer */
#define F1 n->net[1]  /* F1 (feature/comparison) layer */
#define F2 n->net[2]  /* F2 (category/recognition) layer */
/* NOTE(review): a lowercase single-letter macro like `e` is dangerous —
 * it silently rewrites any variable named e.  Kept as-is since other
 * (unseen) code may rely on it. */
#define e 0.0001
#define FALSE 0
#define TRUE 1
/* Unit activation levels produced by threshold()/on_center_off_surround(). */
#define UNIT_OFF 0.0
#define UNIT_ON  1.0
/* Activation-function selectors, cast to the generic afn pointer type. */
#define LINEAR    (afn)linear
#define SIGMOID   (afn)sigmoid
#define THRESHOLD (afn)threshold
#define GAUSSIAN  (afn)gaussian
#define ON_CENTER (afn)on_center_off_surround
/* Weight-initialization modes consumed by set_weights(). */
#define RANDOM 0
#define TEST 1
/* Propagation-function selectors, cast to the generic pfn pointer type. */
#define DOT_PRODUCT (pfn)dot_product
#define TRANSFER    (pfn)one_to_one
/* Connection topologies consumed by connect(); NORMAL/VALUE are further
 * weight-initialization modes for set_weights(). */
#define COMPLETE 0
#define ONE_TO_ONE 1
#define NORMAL 2
#define VALUE 3
#define MAX_ALIASES 10

typedef struct   /* a set of training input/output vector pairs */
{
  int dimX;         /* presumably the input vector length — TODO confirm against loader */
  int dimY;         /* presumably the output vector length — TODO confirm against loader */
  float **invecs;   /* array of input vectors */
  float **outvecs;  /* array of target output vectors */
}iop;

typedef struct   /* one single-character alias binding used by the pattern parser */
{
	char c;  /* the alias character matched by is_alias() */
	char v;  /* the bound value; NOTE(review): declared char, yet getalias()
	          * widens it to float — values outside char range would be
	          * truncated wherever v is assigned.  Confirm intent. */
}aka;

typedef int (*afn)(); /* type of activation functions: operate in place on a
                       * layer's outputs; return units processed (or, for the
                       * competitive on_center_off_surround, the winner index) */

typedef void (*pfn)(); /* type of propagation function: (fromlayer, tolayer) */

typedef struct   /* the generic layer structure  */
{
  int units;    /* count of units on layer */
  int inputs;   /* count of units feeding this layer (0 until connected) */
  int processed;  /* set from the activation function's return value —
                   * see the (commented-out) activate() helper */
  float modifier; /* parameter for the activation function (gain,
                   * threshold, or scale depending on the function) */
  float initval;  /* weight value used by set_value_weights() */
  float *outputs; /* pointer to array of output values  */
  float **connects; /* per-unit weight arrays; NULL when unconnected */
  afn activation;  /* activation function for the layer */
  pfn propto;      /* propagation function for layer */
}layer;

typedef struct   /* the seven F1 sublayer node arrays of an ART2 network
                  * (w, x, v, u, p, q, r follow the standard ART2
                  * notation of Carpenter & Grossberg — confirm against
                  * the accompanying update equations) */
{
  float *w;
  float *x;
  float *v;
  float *u;
  float *p;
  float *q;
  float *r;
}sublayer;

typedef struct   /* top-level ART2 network record */
{
  int layers;  /* number of layers in the network */
  int exemplars; /* number of training pairs in network */
  float A,B,C,D,theta,rho;  /* ART learning parameters (rho = vigilance) */
  float *errs;  /* array of error values at output */
  iop *patterns; /* training pairs structure pointer */
  layer **net;  /* use the basic network structure (see LAYER/I/F1/F2 macros) */
  sublayer f1;  /* F1 sublayer structure */
  char filename[40]; /* default name for network file */
}art2;

int sigmoid(layer *l)
{
  /* Logistic activation applied in place: out = 1 / (1 + exp(-out*tau)),
   * where tau is the layer's modifier (gain).  Returns units processed. */
  int k;
  int count = l->units;
  float gain = l->modifier;
  float *act = l->outputs;
  for(k = 0; k < count; k++)
  {
    act[k] = (1.0/(1.0+exp(-(act[k]*gain))));
  }
  return count;
}

int sigmoid_derivative(layer *l)
{
  /* Derivative of the logistic activation, f'(x) = f(x)*(1 - f(x)),
   * computed from the already-activated outputs.
   * BUG FIX: the original dereferenced `derivs` without ever pointing it
   * at valid storage (undefined behavior), and the BPN branch assigned
   * `derivs[i] = l->errors` with `i` uninitialized.  Only BPN builds,
   * whose layer presumably carries an error array, have anywhere to put
   * the result. */
  float *outs;
  int i, units;
  outs = l->outputs;
  units = l->units;
#ifdef BPN
  {
    float *derivs = l->errors;   /* BPN layers provide derivative storage */
    for(i = 0; i < units; i++)
      derivs[i] = outs[i]*(1.0 - outs[i]);
  }
  return (units);
#else
  /* No derivative array exists in this layer definition; writing through
   * a wild pointer was UB, so report nothing processed instead. */
  (void)outs;
  (void)i;
  return 0;
#endif
}

int linear(layer *l)
{
  /* Linear activation applied in place: out = scale * out, where scale
   * is the layer's modifier.  Returns units processed. */
  int k;
  int count = l->units;
  float gain = l->modifier;
  float *act = l->outputs;
  for(k = 0; k < count; k++)
  {
    act[k] = gain*act[k];
  }
  return count;
}

int linear_derivative(layer *l)
{
  /* Derivative of the linear activation — a constant 1.0 for every unit.
   * BUG FIX: the original wrote 1.0 through the uninitialized pointer
   * `derivs` (undefined behavior).  The layer structure provides no
   * derivative array, so there is nowhere to store the (trivially
   * constant) result; simply report the unit count. */
  return (l->units);
}

int threshold(layer *l)
{
  /* Hard-threshold activation in place: units at or above the layer's
   * modifier go to UNIT_ON, all others to UNIT_OFF.  Returns units
   * processed. */
  int k;
  int count = l->units;
  float cutoff = l->modifier;
  float *act = l->outputs;
  for(k = 0; k < count; k++)
  {
    act[k] = (act[k] >= cutoff) ? UNIT_ON : UNIT_OFF;
  }
  return count;
}

int on_center_off_surround(layer *l)
{
  /* Winner-take-all (competitive) activation: the unit with the largest
   * output is set to UNIT_ON, every other unit to UNIT_OFF.  Ties go to
   * the lowest index.  Returns the winner's index.
   * BUG FIX: `winner` was uninitialized; if the layer had zero units, or
   * every output was <= -100000, outs[winner] indexed garbage (UB).
   * The winner now starts at unit 0 and an empty layer returns -1. */
  float max, *outs;
  int i, winner, units;
  outs = l->outputs;
  units = l->units;
  if(units <= 0)
      return -1;            /* nothing to compete — guard against UB */
  winner = 0;
  max = outs[0];
  for(i = 0; i < units; i++)
  {
    if(outs[i] > max)
    {
      max = outs[i];
      winner = i;
    }
    outs[i] = UNIT_OFF;
  }
  outs[winner] = UNIT_ON;
  return (winner);
}
/*
void activate(layer *layer)
{
  layer->processed = layer->activation(layer);
} */

void dot_product(layer *fromlayer,layer *tolayer)
{
  /* Standard weighted-sum propagation: each destination unit's output
   * becomes the dot product of the source layer's outputs with that
   * unit's weight vector.
   * IMPROVEMENT: `ins = fromlayer->outputs` was re-read inside the outer
   * loop although it is loop-invariant; it is hoisted out here. */
  int i, j;
  float *wts, *outs, *ins, **connects;
  float sum;
  outs = tolayer->outputs;
  connects = tolayer->connects;
  ins = fromlayer->outputs;     /* invariant across destination units */
  for(i = 0; i < tolayer->units; i++)
  {
    wts = connects[i];
    sum = 0;
    for(j = 0; j < fromlayer->units; j++)
      sum = sum + (wts[j]*ins[j]);
    outs[i] = sum;
  }
}

void one_to_one(layer *fromlayer, layer *tolayer)
{
  /* Identity propagation: copy source outputs straight into the
   * destination layer, one unit to one unit.
   * NOTE(review): assumes fromlayer has at least tolayer->units outputs
   * — confirm at call sites. */
  int k;
  float *dst = tolayer->outputs;
  float *src = fromlayer->outputs;
  int count = tolayer->units;
  for(k = 0; k < count; k++)
  {
    dst[k] = src[k];
  }
}

void set_random_weights(layer *layer)
{
  /* Initialize every connection weight to a uniform random value in
   * [-0.5, 0.5).  Layers with no connections are left untouched.
   * BUG FIX: the original divided rand() by a hard-coded 32767, which is
   * only correct when RAND_MAX == 32767; on platforms with a larger
   * RAND_MAX the weights could reach ~65536.  Use RAND_MAX. */
  float **connects, *wts;
  int i, j, incnt, tocnt;
  connects = layer->connects;
  if(!connects)
      return;
  incnt = layer->inputs;
  tocnt = layer->units;
  for(i = 0; i < tocnt; i++)
  {
    wts = connects[i];
    for(j = 0; j < incnt; j++)
      wts[j] = ((float)rand() / (float)RAND_MAX) - 0.5;
  }
}

void set_test_weights(layer *layer)
{
  /* Deterministic initialization for testing: weight [i][j] becomes
   * i + j/1000, so every weight encodes its own position.  Layers with
   * no connections are left untouched. */
  float **rows, *row;
  int r, c;
  rows = layer->connects;
  if(!rows)
      return;
  for(r = 0; r < layer->units; r++)
  {
    row = rows[r];
    for(c = 0; c < layer->inputs; c++)
      row[c] = (float)r + ((float)c / 1000.0);
  }
}

void set_normal_weights(layer *layer)
{
  /* Initialize every weight to 1/sqrt(fan-in), i.e. a normalized
   * constant vector per unit.  Layers with no connections are left
   * untouched. */
  float **rows, *row, wtval;
  int r, c, fan_in, unit_count;
  rows = layer->connects;
  if(!rows)
      return;
  fan_in = layer->inputs;
  unit_count = layer->units;
  wtval = 1 / sqrt(fan_in);
  for(r = 0; r < unit_count; r++)
  {
    row = rows[r];
    for(c = 0; c < fan_in; c++)
      row[c] = wtval;
  }
}

void set_value_weights(layer *layer)
{
  /* Initialize every weight to the layer's configured initval.
   * Layers with no connections are left untouched. */
  float **rows, *row, fill;
  int r, c;
  rows = layer->connects;
  if(!rows)
      return;
  fill = layer->initval;
  for(r = 0; r < layer->units; r++)
  {
    row = rows[r];
    for(c = 0; c < layer->inputs; c++)
      row[c] = fill;
  }
}

void set_weights(layer *l,int how)
{
  /* Dispatch to the weight-initialization routine selected by `how`
   * (RANDOM, TEST, NORMAL, or VALUE); any other value is a no-op. */
  switch(how)
  {
    case RANDOM:
      set_random_weights(l);
      break;
    case TEST:
      set_test_weights(l);
      break;
    case NORMAL:
      set_normal_weights(l);
      break;
    case VALUE:
      set_value_weights(l);
      break;
    default:
      break;
  }
}

int *define_layers(int layers,...)
{
  /* Collect `layers` integer sizes from the variadic arguments into a
   * newly allocated array: element 0 holds the layer count, elements
   * 1..layers hold the sizes.  The caller owns (and frees) the result.
   * BUG FIX: the calloc result was used unchecked; on allocation
   * failure this now returns NULL instead of dereferencing it. */
  va_list argptr;
  int *l, i;
  l = (int *)calloc(layers+1, sizeof(int));
  if(l == NULL)
      return NULL;
  l[0] = layers;
  va_start(argptr, layers);
  for(i = 1; i <= layers; i++)
      l[i] = va_arg(argptr, int);
  va_end(argptr);
  return l;
}

layer *build_layer(int units)
{
  layer *l;
  l = (layer *)calloc(1,sizeof(layer));
  l->units = units;
  l->inputs = 0;
  l->modifier = 0.0;
  l->initval = 1.0;
  l->outputs = (float *)calloc(units,sizeof(float));
  l->connects = NULL;
  l->activation = (afn)linear;
  l->propto = (pfn)dot_product;
  return l;
}

void set_activation(layer *l,afn activation,float modifier)
{
  /* Install an activation function and its parameter on a layer. */
  l->modifier = modifier;
  l->activation = activation;
}

void set_propagation(layer *l,pfn netx)
{
  /* Install a propagation function on a layer.  Note this also resets
   * the activation to LINEAR with gain 1.0 — callers wanting another
   * activation must call set_activation() afterwards. */
  l->propto = netx;
  set_activation(l, LINEAR, 1.0);
}

void connect_layers(layer *inlayer, layer *tolayer)
{
  /* Fully connect two layers: allocate one zeroed weight row per
   * destination unit, each row holding one weight per source unit. */
  register int u;
  int fan_in = inlayer->units;
  int unit_count = tolayer->units;
  tolayer->inputs = fan_in;
  tolayer->connects = (float **)calloc(unit_count, sizeof(float *));
  for(u = 0; u < unit_count; u++)
  {
    tolayer->connects[u] = (float *)calloc(fan_in, sizeof(float));
  }
}

void connect(layer *inlayer,layer *tolayer,int how,int init)
{
  /* Wire two layers together using topology `how` (COMPLETE builds a
   * full weight matrix with dot-product propagation; ONE_TO_ONE just
   * installs identity propagation), then initialize the destination's
   * weights per `init` (see set_weights). */
  switch(how)
  {
    case COMPLETE:
      connect_layers(inlayer, tolayer);
      set_propagation(tolayer, DOT_PRODUCT);
      break;
    case ONE_TO_ONE:
      set_propagation(tolayer, TRANSFER);
      break;
    default:
      break;
  }
  set_weights(tolayer, init);
}

layer **build_net(int layers,int *sizes)
{
  /* Allocate an array of `layers` layer pointers, building each layer
   * with the corresponding entry of `sizes`.  Caller owns the result
   * (released via destroy_net). */
  register int k;
  layer **net = (layer **)calloc(layers, sizeof(layer *));
  for(k = 0; k < layers; k++)
  {
    net[k] = build_layer(sizes[k]);
  }
  return net;
}

void destroy_net(int layers,layer **n)
{
  /* Release everything build_net/build_layer/connect_layers allocated:
   * each layer's outputs, each weight row, the row-pointer array, the
   * layer itself, then the layer-pointer array.
   * BUG FIX: the original freed the individual weight rows but leaked
   * the connects pointer array itself.  (free(NULL) is a no-op, so the
   * NULL guards on outputs/rows are unnecessary and dropped.) */
  int i, j;
  layer *l;
  for(i = 0; i < layers; i++)
  {
    l = n[i];
    free(l->outputs);
    if(l->connects != NULL)
    {
      for(j = 0; j < l->units; j++)
        free(l->connects[j]);
      free(l->connects);   /* was leaked before */
    }
    free(l);
  }
  free(n);
}

void show_weights(layer *layer)
{
  /* Print every weight of a layer, one row of Wt[i][j] entries per
   * destination unit.  Non-negative values get a leading space so
   * columns line up with negative ones.
   * BUG FIX: both branches of the original if/else printed the identical
   * format "%5.3f", making the branch dead; the sibling display routines
   * (show_outputs, show_targets) pad non-negative values with a space,
   * so this now does the same. */
  float **connects, *wts;
  int i, j, incnt, tocnt;
  connects = layer->connects;
  if(!connects)
  {
    printf("Layer has no connections.\n");
    return;
  }
  incnt = layer->inputs;
  tocnt = layer->units;
  for(i = 0; i < tocnt; i++)
  {
    wts = connects[i];
    for(j = 0; j < incnt; j++)
    {
      printf("Wt[%d][%d] = ", i, j);
      if(wts[j] < 0)
          printf("%5.3f", wts[j]);
      else
          printf(" %5.3f", wts[j]);   /* align with negative entries */
    }
    printf("\n");
  }
}

void show_outputs(layer *layer)
{
  /* Print every unit output on one line; non-negative values get an
   * extra space so columns align with negative ones. */
  int k;
  float val;
  for(k = 0; k < layer->units; k++)
  {
    val = layer->outputs[k];
    if(val < 0)
      printf("Out[%d] = %5.3f", k, val);
    else
      printf("Out[%d] =  %5.3f", k, val);
  }
  printf("\n");
}

void show_targets(float *t,int how_many)
{
  /* Print `how_many` target ("should-be") values on one line;
   * non-negative values get an extra space for column alignment. */
  int k;
  for(k = 0; k < how_many; k++)
  {
    if(t[k] < 0)
      printf("SB[%d] = %5.3f", k, t[k]);
    else
      printf("SB[%d] =  %5.3f", k, t[k]);
  }
  printf("\n");
}

/* Global alias registry consulted by is_alias()/getalias();
 * presumably populated elsewhere in the file (not visible here). */
aka **alias;  // array of pointers to aliases
int akacnt;   // global holds aka count

int position(char c,char *s)
{
  /* Return the index of the first occurrence of c in s, or -1 if c does
   * not appear.  A '\0' argument is reported as absent (the terminator
   * is never matched), matching the original's behavior.
   * BUG FIX: the original called strlen(s) in the loop condition on
   * every iteration, making each scan O(n^2); walk to the terminator
   * directly instead. */
  int i;
  for(i = 0; s[i] != '\0'; i++)
  {
    if(s[i] == c)
      return i;
  }
  return -1;
}

int is_float(char *s)
{
  /* Heuristic check: after skipping leading whitespace/control bytes,
   * does s consist only of float characters (digits, '.', sign, 'e'/'E')
   * up to a ')' or end of string?  SIDE EFFECT: on success the string is
   * truncated in place at the first non-numeric character.
   * BUG FIX: the whitespace skip now stops at the terminator; the
   * original `while(s[i] <= ' ') i++` ran past the end of an empty or
   * all-blank string ('\0' <= ' '), which is undefined behavior. */
  int i = 0;
  while(s[i] != '\0' && s[i] <= ' ') i++;
  for(; position(s[i],"0123456789.+-eE") >= 0; i++)
  {}
  /* ")\0" contains only ')': position never matches the terminator. */
  if((i >= strlen(s)) || (position(s[i],")\0") >= 0))
  {
    s[i] = '\0';
    return 1;
  }
  return 0;
}

int is_assignment(char *s)
{
  /* True when the string contains an '=' character. */
  return position('=', s) >= 0;
}

int is_pattern(char *s)
{
  /* True when s contains a '(' that appears before a ')'.
   * (The original's locals were named backwards — rp held the '('
   * position and lp the ')' position; renamed for clarity.) */
  int open_at, close_at;
  open_at = position('(', s);
  close_at = position(')', s);
  return (open_at > -1) && (open_at < close_at);
}

int is_alias(char c)
{
  /* True when c is registered in the global alias table. */
  int k;
  for(k = 0; k < akacnt; k++)
    if(alias[k]->c == c)
      return 1;
  return 0;
}

char *substr(char *s,int pos,int bytes)
{
  /* Return a newly allocated, NUL-terminated copy of up to `bytes`
   * characters of s starting at index pos.  Caller frees the result;
   * NULL on allocation failure.
   * BUG FIXES: the original passed the character VALUE s[pos], cast to
   * a pointer, into strncpy — undefined behavior on any call; it also
   * allocated only `bytes` slots, so a full-length copy had no room for
   * the terminator (strncpy does not add one when the source fills the
   * buffer). */
  char *result;
  result = (char *)calloc(bytes + 1, sizeof(char));
  if(result != NULL)
      strncpy(result, s + pos, bytes);
  return result;
}

float getalias(char c)
{
  /* Look up the value bound to alias character c in the global table;
   * 0 when the character is not registered.
   * NOTE(review): aka.v is declared char, so the value widened to float
   * here is limited to the char range — confirm that is intended. */
  int k;
  for(k = 0; k < akacnt; k++)
  {
    if(alias[k]->c == c)
      return (alias[k]->v);
  }
  return 0;
}

int translate(char *s,float *f)
{
  /* Convert the token at the front of s into a float stored in *f.
   * A registered alias character resolves through the global alias
   * table; otherwise the token must parse as a float (is_float truncates
   * s at the token's end as a side effect).  Returns the index just past
   * the consumed text.  Exits the program when conversion is impossible.
   * BUG FIXES: the whitespace skip now stops at the terminator (the
   * original ran off the end of an empty/blank string — UB), the fatal
   * diagnostic goes to stderr, and the process exits with a nonzero
   * status instead of exit(0), which signaled success. */
  int index = 0;
  while(s[index] != '\0' && s[index] <= ' ') index++;
  if(is_alias(s[index]))
  {
    *f = getalias(s[index]);
    return (index+1);
  }
  if(is_float(s))
  {
    index = strlen(s);
    *f = (float)strtod(s,NULL);
    return (index+1);
  }
  fprintf(stderr,"\nERROR:Could not convert value %s",s);
  exit(1);
  return 0; /* unreachable; silences missing-return warnings */
}

int dimension(char *s)
{
  float f;
  int i,len,valid,count=0;
  len = strlen(s) - 1;
  for(i=1,valid=0;i<len;)
  {
    valid = translate((char *)&s[i],&f);
	if(valid)
	{
	  count += 1;

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -