bp.c — BP neural network (back-propagation neural network); seems pretty good. Language: C. (Page 1 of 3)

/* ************************************************** */
/* file bp.c:  contains the main program and network  */
/*             creation routines.                     */
/*                                                    */
/* Copyright (c) 1990-96 by Donald R. Tveter          */
/*                                                    */
/* ************************************************** */

#ifdef INTEGER
#include "ibp.h"
#else
#include "rbp.h"
#endif

/* built-in C functions */

extern int rand();
extern void srand();

#ifdef INTEGER
extern int scale();
extern REAL unscale();
#endif

extern WTTYPE rdr();
extern int pg(), printoutunits(), readch(), readint(), eval();
extern int readpats(), evalone(), printstats(), popfile(), pushfile();
extern REAL readreal();
extern char *readstr();
extern int loadpat(), run();
extern void menus(), texterror(), help(), restoreweights();
extern void saveweights(), parameters(), printweights();

char sysstr[129];     /* string passed to OS with ! command */

int bufferend;        /* index of last character in input line */
int bufferptr;        /* position of next character in buffer */
char buffer[BUFFSIZE];/* holds contents of one input line */
char trfiles[RXSTRSIZE];/* data file name(s) */
int ch;               /* general purpose character variable */
FILE *copy;           /* file pointer to copy file */
char copyflag;        /* + for copying, - for no copy */
int copyfilecount;    /* to number the copy files */
jmp_buf cmdloopstate; /* to save state in case of a SIGINT */
FILE *data;           /* file for original data */
char *datafile;       /* copy of the data file name saved here */

WTTYPE dbdeta;        /* the initial eta value for the DBD method */
WTTYPE kappa;         /* the DBD learning parameter */
WTTYPE decay;         /* the decay parameter for the DBD method */
WTTYPE etamax;        /* the maximum eta for the DBD method */
WTTYPE theta1;        /* the DBD parameter */
WTTYPE theta2;        /* 1 - theta1 */
WTTYPE noise;         /* noise parameter for dbd */
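
/* For reference (a standard statement of the delta-bar-delta rule of
   Jacobs 1988, not copied from this program's update code): each weight
   keeps its own eta; with d(t) the current slope and dbar(t) a running
   average, dbar(t) = theta2*d(t) + theta1*dbar(t-1), eta grows by kappa
   when dbar(t-1) and d(t) agree in sign, shrinks by the decay factor when
   they disagree, and is capped at etamax.  Exactly how noise and the
   thetas enter is determined by the update code elsewhere in the program. */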

char deriv;           /* flags type of derivative to use */
char echo;            /* controls echoing of characters during input */
char emptystring;     /* for unused string values */

WTTYPE eta;           /* basic learning rate */
WTTYPE eta2;          /* learning rate for lower layers */
WTTYPE alpha;         /* momentum term */
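
/* For reference (the usual momentum form of back-propagation, not copied
   from this program's update code): the weight step is typically
       delta_w(t) = -eta * dE/dw + alpha * delta_w(t-1)
   with eta2 taking the place of eta for weights into the lower layers. */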

char recurinit;       /* initialize input and output at start of training */
int extraconnect;     /* flags the use of connections between */
                      /* non-adjacent layers */
char *inputfile;      /* name of file to take extra commands from */
FILE *filestack[MAXFILES];  /* allows for nested reads from files */
int filetimes[MAXFILES];    /* number of times to read file */
int filestackptr;     /* has the index of the current file */
int format[MAXFORMAT];/* each value in format indicates where to put */
                      /* a blank for compressed output mode or a */
                      /* carriage return for real output */
int timestoread;      /* number of times to read an input file */
char incrementseed;   /* + advances seed, - resets to first value */
char informat;        /* controls format to read numbers */
WTTYPE initrange;     /* initial range of random weights */
WTTYPE kicksize;      /* the minimum size weights that are affected */
WTTYPE kickrange;     /* the range weights are initialized to */
char wtinitroutine;   /* flags weight initialization algorithm */

long iotime;          /* time used up printing */
short nlayers;        /* number of layers in network */
int ioconnects;       /* flags existence of input/output connections */
int originallypcc;    /* saves kind of net to start with */

LAYER *last;          /* has address of the output layer */
LAYER *start;         /* has address of the input layer */
int stmsize;          /* number of hidden (h) layer units in a recurrent net */

int lastadd;          /* iteration where last node was added */
REAL lastadderr;      /* error at the last hidden unit addition */
int lastprint;        /* last iteration pattern responses printed */
int lastsave;         /* last time weights were saved */
int lineno;           /* counts lines for paging */
int maxhidden;        /* maximum number of hidden units allowed */
int maxiter;          /* maximum iterations when not benchmarking */
char netbuild;        /* the type of net to build */
char offby1;          /* + means out of date stats for training set */
char outformat;       /* controls format to print output */
char outstr[OUTSTRSIZE]; /* the output string */
int pagesize;         /* size of page for pg */
char runningflag;     /* +/- to print the "running . . ." message */
REAL pasterror;       /* error from window iterations ago */
int pccnet;           /* flags pseudo-cascade-correlation net */
char probtype;        /* flags general or classification pattern format */
char debugoutput;     /* turns on any debugging code I need */
int prevnpats;        /* previous number of patterns, initially 0 */
int printrate;        /* printrate when not benchmarking */
WTTYPE unknown;       /* value for x in compressed input */

REAL qpdecayh;        /* the quickprop weight decay hidden layer */
REAL qpdecayo;        /* the quickprop weight decay output layer */
WTTYPE qpeta;         /* the quickprop eta */
WTTYPE qpnoise;       /* quickprop noise (integer version only) */
WTTYPE mu;            /* the quickprop acceleration factor */
char qpslope;         /* flags using slope in quickprop all the time */
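
/* For reference (the usual quickprop step of Fahlman 1988, not copied from
   this program): with S(t) the current slope and S(t-1) the previous one,
       delta_w(t) = ( S(t) / (S(t-1) - S(t)) ) * delta_w(t-1)
   limited so that no step exceeds mu times the previous step; qpeta supplies
   a plain gradient term and qpdecayh/qpdecayo add weight decay to the slopes
   of hidden and output layer weights. */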

int readerror;        /* flags an error in reading a value */
int readingpattern;   /* flags reading pattern state */
WTTYPE classoff;      /* target value for class off value */
WTTYPE classon;       /* target value for class on value */
char ringbell;        /* flag to ring bell when finished */
int right;            /* number of training patterns learned */
DATA s[2][2];         /* contains stats on pattern learning */
DATA rs[2][2];        /* stats on recurrent pattern learning */
int saverate;         /* rate at which to save weights */
char saveonminimum;   /* flags saving weights on minimum of test set */
REAL minimumsofar;    /* lowest test set error so far */
unsigned seed;        /* seed for generating random weights */
SEEDNODE *seedstart;  /* the list of user defined seeds */
#ifdef LOOKUP
WTTYPE sigmoid[7808]; /* table to look up the sigmoid's value */
#endif

WTTYPE stdthresh;     /* the bias WEIGHT value */
WTTYPE hbiasact;      /* hidden layer bias activation VALUE */
WTTYPE obiasact;      /* output layer bias activation VALUE */
char biasset;         /* flags the setting of the bias WEIGHT */
char summary;         /* flags summary output mode */
int testpat;          /* pattern to skip when benchmarking; else 0 */
char *testfile;       /* file to take test patterns from */
WTTYPE toler;         /* tolerance based on targets */
REAL toloverall;      /* tolerance based on average error */
WTTYPE toosmall;      /* weights smaller than toosmall were removed */
#ifdef INTEGER
INT32 totaldiff;      /* totals errors to find average error per unit */
#else
REAL totaldiff;
#endif
int totaliter;        /* counts total iterations for the program */
char *trainfile;      /* file to take training patterns from */
REAL trigger;         /* trigger slope for dynamic node creation */
char update;          /* flags type of update rule to use */
char up_to_date_stats;/* + does an extra forward pass after update */
int window;           /* window over which trigger slope is computed */
int wrong;            /* number of training patterns unlearned */
char wtfilename[FILENAMESIZE]="weights"; /* whole name for weight file */
char *wtfile;         /* base file name for weights file */
int wtfilecount;      /* counts weight files as they are written */
char wtformat;        /* controls format to save and restore weights */
#ifdef INTEGER
char wtlimithit;      /* flags whether the limit has been hit */
#endif
int wttotal;          /* number of weights in the network */
int wtsinuse;         /* number of weights in use */
char zeroderiv;       /* flags zero derivative when using A ap */

char ah;              /* hidden layer activation function */
char ao;              /* output layer activation function */
WTTYPE Dh;            /* hidden layer D */
WTTYPE Do;            /* output layer D */
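
/* Assumption (added note, not from the original): D is normally a
   sharpness/gain parameter, so the sigmoid case has the form
       f(x) = 1 / (1 + exp(-D * x))
   The actual functions selected by ah and ao live elsewhere in the code. */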

REAL poft;            /* temporal difference previous value */

/* given a layer no. and unit no. locateunit returns the address */
UNIT *locateunit(layerno,unitno)
int layerno, unitno;
{int i;
 UNIT *u;
 LAYER *layer;
 
if (layerno < 1 || layerno > nlayers)
 {
  pg(stdout,"bad layer number\n");
  return(NULL);
 };
layer = start;
for(i=1;i<=(layerno-1);i++) layer = layer->next;
u = (UNIT *) layer->units;
while (u != NULL && u->unitnumber != unitno) u = u->next;
if (u == NULL)
 {
  sprintf(outstr,"there is no unit %3d in layer %3d\n",unitno,layerno);
  pg(stdout,outstr);
 };
return(u);     
}
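
/* Illustrative only (not part of the original file): a typical call once a
   network exists; the layer and unit numbers are invented for the example. */
#if 0
{
 UNIT *u;
 u = locateunit(2,3);  /* unit 3 of layer 2, or NULL if it does not exist */
 if (u != NULL)
  {
   /* u->oj, u->tj, u->wtlist and so on can be examined here */
  };
}
#endif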

#ifdef SYMMETRIC

INT32 wtaddress(i,j,biasunit,type,size) /* Returns the address of a */
int i,j;                                /* weight (1), olddw (2),   */
int biasunit;                           /* eta (3), total (4),      */
int type;                               /* or slope (5)             */
int size;                               /* One is created if it     */
                                        /* doesn't already exist.   */
{ int k;
  INT32 addr;
  UNIT *u;
  WTNODE *w;

if (biasunit) addr = (INT32) malloc(size);
else if (j >= i) addr = (INT32) malloc(size);
else /* the item already exists, so find its address */
 {
  u = locateunit(2,j);
  w = (WTNODE *) u->wtlist;
  k = 1;
  while (k < i)
   {
    w = w->next;
    k = k + 1;
   };
  if (type == 1) addr = (INT32) w->weight;
  else if (type == 2) addr = (INT32) w->olddw;
  else if (type == 3) addr = (INT32) w->eta;
  else if (type == 4) addr = (INT32) w->total;
  else if (type == 5) addr = (INT32) w->slope;
  else pg(stdout,"bad type in wtaddress\n");
 };
return(addr);
}
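
/* Added note: the sharing works because units are created in increasing
   order.  For a pair with j >= i the cell is malloc'ed here and stored on
   the current unit's weight list by setweight; when the mirror pair with
   j < i comes up later, the loop above walks unit j's already-built list
   out to position i and returns the cell found there, so both directions
   of a symmetric connection use one weight, olddw, eta, total and slope. */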

void setweight(w,i,j,biasunit) /* set initial values in w */
WTNODE *w;
short i, j;
int biasunit;
{WTTYPE *s;

s = (WTTYPE *) wtaddress(i,j,biasunit,1,WTSIZE);
*s = 0;
w->weight = s;
s = (WTTYPE *) wtaddress(i,j,biasunit,2,WTSIZE);
*s = 0;
w->olddw = s;
s = (WTTYPE *) wtaddress(i,j,biasunit,3,WTSIZE);
*s = eta;
w->eta = s;
s = (WTTYPE *) wtaddress(i,j,biasunit,5,WTSIZE);
*s = 0;
w->slope = s;
w->inuse = 1;
#ifdef INTEGER
w->total = (INT32 *) wtaddress(i,j,biasunit,4,sizeof(INT32));
#else
w->total = (REAL *) wtaddress(i,j,biasunit,4,sizeof(REAL));
#endif
}

#else

void setweight(w,i,j,biasunit) /* set initial values in w */
WTNODE *w;
short i,j;
int biasunit;
{
w->weight = 0;
w->olddw = 0;
w->slope = 0;
w->eta = dbdeta;
if (biasunit) w->inuse = 2; else w->inuse = 1;
}

#endif

LAYER *mklayer(prevlayer,n)  /* creates a layer of n units, pointers */
LAYER *prevlayer;            /* and weights back to the units in the */
int n;                       /* previous layer and links this new */
                             /* layer into the list of layers */
{UNIT *front, *p, *q, *bias, *prev, *ptr;
 WTNODE *wfront, *wprev, *w;
 LAYER *lptr;
 int i, j, count;

/* make a list of nodes in this layer */

count = 1;
front = (UNIT *) malloc(sizeof(UNIT));
front->unitnumber = count;
front->layernumber = nlayers;
front->oj = 0;
front->tj = 0;
front->translate = 0;
front->userscale = scale(1.0);
front->wtlist = NULL;
prev = front;
for(i=1;i<n;i++)
 {
  count = count + 1;
  ptr = (UNIT *) malloc(sizeof(UNIT));
  prev->next = ptr;
  ptr->unitnumber = count;
  ptr->layernumber = nlayers;
  ptr->wtlist = NULL;
  ptr->translate = 0;
  ptr->userscale = scale(1.0);
  ptr->oj = 0;
  ptr->tj = 0;
  prev = ptr;
 };
prev->next = NULL;

/* make a LAYER node to point to this list of units */

lptr = (LAYER *) malloc(sizeof(LAYER));
lptr->unitcount = n;
lptr->initialcount = n;
lptr->D = scale(1.0);
lptr->biasact = scale(1.0);
lptr->patstart[TRAIN] = NULL;
lptr->currentpat[TRAIN] = NULL;
lptr->patstart[TEST] = NULL;
lptr->currentpat[TEST] = NULL;
lptr->backlayer = prevlayer;
lptr->next = NULL;
lptr->units = front;   /* connect the list of units */

/* return if this is the input layer */

if (prevlayer == NULL) return(lptr);
prevlayer->next = lptr;

/* If we are working on a deeper layer, for every node in this layer, */
/* create a linked list back to units in the previous layer. */

i = 1;
q = front;
while (q != NULL) /* do a unit */
 {    
  j = 1;            /* handle first connection */
  p = (UNIT *) prevlayer->units;
  wfront = (WTNODE *) malloc(sizeof(WTNODE));
  wttotal = wttotal + 1;
  q->wtlist = wfront;
  wprev = wfront;
  wfront->backunit = p;
  setweight(wfront,i,j,0);
  p = p->next;
  while (p != NULL) /* handle rest of connections */
   {
    j = j + 1;
    w = (WTNODE *) malloc(sizeof(WTNODE));
    wttotal = wttotal + 1;
    wprev->next = w;
    w->backunit = p;
    setweight(w,i,j,0);
    wprev = w;
    p = p->next;
   };
  j = j + 1;
  bias = (UNIT *) malloc(sizeof(UNIT));   /* create a bias unit */
  bias->oj = scale(1.0);
  bias->inuse = 1;
  bias->layernumber = nlayers;
  bias->unitnumber = 32767;           /* bias unit is unit 32767 */
  w = (WTNODE *) malloc(sizeof(WTNODE)); /* connect to end of list */
  wttotal = wttotal + 1;
  wprev->next = w;
  w->backunit = bias;
  setweight(w,n+2,i,1);
  w->next = NULL;
  q = q->next;
  i = i + 1;
 };
return(lptr);
}
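
/* Illustrative sketch (not part of this file): the network-creation command
   elsewhere in bp.c drives mklayer roughly as below.  The layer sizes are
   invented; nlayers is shown being set before each call only because
   mklayer stamps its current value into every unit it creates, and the
   real code also looks after start, last and the rest of the bookkeeping. */
#if 0
{
 LAYER *l1, *l2, *l3;
 nlayers = 1; l1 = mklayer(NULL,2);   /* 2 input units               */
 nlayers = 2; l2 = mklayer(l1,3);     /* 3 hidden units wired to l1  */
 nlayers = 3; l3 = mklayer(l2,1);     /* 1 output unit wired to l2   */
 start = l1; last = l3;
}
#endif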

#ifndef SYMMETRIC

void connect(a,b,range)  /* add a connection from unit a to unit b */
UNIT *a, *b;             /* connections go in increasing order */
WTTYPE range;

{WTNODE *wnew, *w, *wprev;
 UNIT *wunit;
 int farenough;

wnew = (WTNODE *) malloc(sizeof(WTNODE));
wttotal = wttotal + 1;
wnew->eta = dbdeta;
wnew->weight = range * (rand() & 32767) / 32768;
if ((rand() & 32767) > 16383) wnew->weight = -wnew->weight;
wnew->olddw = 0;
wnew->slope = 0;
wnew->inuse = 1;
wnew->backunit = a;
w = (WTNODE *) b->wtlist;
wprev = NULL;
wunit = (UNIT *) w->backunit;
farenough = 0;                  /* insert the weight in order */
while (w != NULL && !farenough)
 if (wunit->layernumber > a->layernumber) farenough = 1;
 else if (wunit->layernumber == a->layernumber)
  while (w != NULL && !farenough)
   {
    if (wunit->unitnumber < a->unitnumber &&
        wunit->layernumber == a->layernumber)
     {
      wprev = w;
      w = w->next;
      wunit = (UNIT *) w->backunit;
     }
    else farenough = 1;
   }
  else
   {
    wprev = w;
    w = w->next;
    wunit = (UNIT *) w->backunit;
   };
if (wprev == NULL)
 {
  wnew->next = w;
  b->wtlist = wnew;
 }
else
 {
  wnew->next = w;
  wprev->next = wnew;
 };
}
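
/* Illustrative sketch (not in the original): connect is the primitive behind
   the extra connections between non-adjacent layers, e.g. wiring an input
   unit straight to an output unit; the unit numbers below are invented. */
#if 0
{
 UNIT *from, *to;
 from = locateunit(1,1);       /* unit 1 of the input layer  */
 to = locateunit(nlayers,1);   /* unit 1 of the output layer */
 if (from != NULL && to != NULL) connect(from,to,initrange);
}
#endif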

void addunit(layerno,range)
int layerno;  /* add hidden unit to end of the layer */
WTTYPE range;
{
 LAYER *lptr, *prevlayer, *nextlayer;
 UNIT *u, *prevu, *p, *bias;
 WTNODE *wnode;
 int i, unitno;

lptr = start;
for (i=1;i <= (layerno - 1); i++) lptr = lptr->next;
unitno = lptr->unitcount;
lptr->unitcount = unitno + 1;
prevu = locateunit(layerno,unitno);
if (prevu == NULL) return;
u = (UNIT *) malloc(sizeof(UNIT));
prevu->next = u;
u->next = NULL;
u->unitnumber = unitno + 1;
u->layernumber = layerno;
u->inuse = 1;
u->wtlist = NULL;
if (layerno > 1)
 {
  bias = (UNIT *) malloc(sizeof(UNIT));
  bias->inuse = 1;
  bias->oj = scale(1.0);
  bias->layernumber = layerno;
  bias->unitnumber = 32767;           /* bias unit is unit 32767 */
  wnode = (WTNODE *) malloc(sizeof(WTNODE));
  wttotal = wttotal + 1;
  wnode->weight = range * (rand() & 32767) / 32768;
  if ((rand() & 32767) > 16383) wnode->weight = -wnode->weight;
  wnode->olddw = 0;
  wnode->slope = 0;
  wnode->eta = dbdeta;
  wnode->inuse = 2;
  wnode->next = NULL;
  wnode->backunit = bias;
  u->wtlist = wnode;
  prevlayer = lptr->backlayer;
  p = (UNIT *) prevlayer->units;
  while (p != NULL)
   {
    connect(p,u,range);
    p = p->next;
   };
 };
nextlayer = lptr->next;
p = (UNIT *) nextlayer->units;
while (p != NULL)
 {
  connect(u,p,range);
  p = p->next;
 };
lastadd = totaliter;
lastadderr = s[TOL][TRAIN].avgerr;
}      
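
/* Illustrative sketch (not in the original): dynamic node creation grows a
   hidden layer by one unit and wires it to the layers on either side, e.g. */
#if 0
 addunit(2,initrange);   /* add one more unit to hidden layer 2 */
#endif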

#endif

void init()
{int i,j;
 SEEDNODE *snode;

poft = scale(0.0);     /* part of a temporal difference test */

alpha = scale(0.9);    /* gradient descent parameters */
eta = scale(0.5);
eta2 = eta;

decay = scale(0.5);    /* DBD parameters */
dbdeta = scale(0.5);
etamax = scale(30.0);
kappa = scale(0.5);
theta1 = scale(0.5);
theta2 = scale(1.0) - theta1;

mu = scale(1.75);      /* quickprop parameters */
qpdecayh = 0;
qpdecayo = 0;
qpeta = scale(0.5);
qpnoise = 0;
qpslope = '+';

classoff = 0;
classon = scale(1.0);
debugoutput = '-';
inputfile = "(none)";
bufferend = 0;
bufferptr = BUFFSIZE + 1;
ch = ' ';
copyflag = '-';
copyfilecount = 0;
deriv = 'd';
echo = '-';

extraconnect = 0;
format[0] = 0;
for(i=1;i<=MAXFORMAT-1;i++) format[i] = format[i-1] + 10;
incrementseed = '-';
informat = 'r';
initrange = scale(1.0);
kickrange = 0;
kicksize = 0;
wtinitroutine = '0';
lastadd = 0;
