
📄 backprop.c

📁 Source code of the CMU97 soccer-robot simulation team
💻 C
📖 Page 1 of 2
/************************************
   Code for implementing the backpropagation algorithm for training
   a fully-connected MLP neural network with 1 layer of hidden units.
   Loosely translated from backprop.lisp by David Touretzky.
   Compile with the command:  cc -o backprop backprop.c -lm

   Justin Boyan, Oct 5 1993
*************************************/

#include "backprop.h"
#include <stdio.h>   /* standard headers for printf/fopen, str*, fabs, rand */
#include <stdlib.h>
#include <string.h>
#include <math.h>

#ifndef FOR_USE
#include "main.h"   /* All the rest of the backprop stuff: not needed for use */
extern  int NN_n_hid;
extern  float ETA1;
extern  float ETA2;
extern  float GAMMA1;
extern  float GAMMA2;
#else
#include "global.h" /* We're in the clienttrial directory */
#endif

/*** Set up the network ***/
static int   n_pat;
static float **train_in;
static float **train_out;

/*********/

static float eta1, eta2;          /* learning rates used on w1 and w2 weights */
static float alpha=0.9;           /* momentum coefficient */
static float gamma1, gamma2;      /* weight decay coefficients */
static float randmax=0.5;         /* random weights are initialized in [-R,R) */
static long  global_max_epochs;   /* stores argument to train */
static long  epoch_counter=0;     /* keeps track of # of training epochs */
static long  best_epoch_counter=0;/* keeps track of best epoch number */
static float epoch_error;         /* total sum^2 error each epoch */
static float best_epoch_error;    /* best epoch error seen so far */
static int   epoch_error_num;     /* number of misclassifications */
static int   best_epoch_error_num;/* best number of misclassifications */

static float inp[NN_n_inp+1];     /* input unit activations -- +1 for bias */
static float *hid0,*hid;          /* hidden units before & after activ. fn. */
static float out0[NN_n_out],out[NN_n_out]; /* output units before & after activ. fn. */
static float target[NN_n_out];    /* target output values */
static float dout[NN_n_out],*dhid;/* delta vals used in backprop computation */
static float **w1;                /* input->hidden weight matrix */
static float **w2;                /* hidden->output weight matrix */
static float **dw1;               /* accumulates weight changes to w1 per-epoch */
static float **dw2;               /* accumulates weight changes to w2 per-epoch */
static float **prev_dw1;          /* previous epoch's change to w1 */
static float **prev_dw2;          /* previous epoch's change to w2 */
static float **best_w1;           /* w1 corresponding to best_epoch_error */
static float **best_w2;           /* w2 corresponding to best_epoch_error */

enum actfntype {sigm,line,gauss}  /* activation fn to use on each layer */
/*h_actfn = sigm, o_actfn = sigm;*/
  h_actfn = sigm, o_actfn = line; /* Fahlman suggests line for cont output */

/*** Prototypes ***/
#define sqr(x) ((x)*(x))
#define rnd(lo,hi) ((lo) + ((hi)-(lo))*(rand()/2147483647.0))

#ifndef FOR_USE
void  test(char *source);
void  train(char *source, int max_epochs, int argc, char **argv);
void  initialize();
void  train_one_epoch();
void  backward_pass();
void  update_weights();
void  write_weights(char *source);
void  initialize_from_file(char *source);
#endif
void  forward_pass();
void  load_weights(char *source);

/***/

float **allocate_2dim(int dim1, int dim2)
{
  float **new_array;
  new_array = new float*[dim1];
  for (int i=0; i<dim1; i++){
    new_array[i] = new float[dim2];
    for (int j=0; j<dim2; j++){
      new_array[i][j] = 0;
    }
  }
  return new_array;
}

void allocate_arrays(int patterns)
{
  hid0 = new float[NN_n_hid];
  hid  = new float[NN_n_hid+1];
  dhid = new float[NN_n_hid];

  w1       = allocate_2dim(NN_n_inp+1,NN_n_hid);
  dw1      = allocate_2dim(NN_n_inp+1,NN_n_hid);
  prev_dw1 = allocate_2dim(NN_n_inp+1,NN_n_hid);
  best_w1  = allocate_2dim(NN_n_inp+1,NN_n_hid);

  w2       = allocate_2dim(NN_n_hid+1,NN_n_out);
  dw2      = allocate_2dim(NN_n_hid+1,NN_n_out);
  prev_dw2 = allocate_2dim(NN_n_hid+1,NN_n_out);
  best_w2  = allocate_2dim(NN_n_hid+1,NN_n_out);

  train_in  = allocate_2dim(patterns,NN_n_inp);
  train_out = allocate_2dim(patterns,NN_n_out);
}

void NN_initialize_to_use(char *source)
{
  w1 = allocate_2dim(NN_n_inp+1,NN_n_hid);
  w2 = allocate_2dim(NN_n_hid+1,NN_n_out);
  hid0 = new float[NN_n_hid];
  hid  = new float[NN_n_hid+1];

  /* initialize bias units */
  inp[NN_n_inp] = 1.0;
  hid[NN_n_hid] = 1.0;

  load_weights(source);
}

void NN_use(float *array)
{
  /* Assumes that the array has room for max(n_inp, n_out) entries */
  for (int i=0; i<NN_n_inp; i++){
    inp[i] = (array[i]-INP_BASES[i])/INP_RANGES[i];
  }
  forward_pass();
  for (int i=0; i<NN_n_out; i++){
    array[i] = (out[i]*OUT_RANGES[i]) + OUT_BASES[i];
  }
}

#ifndef FOR_USE /** Only need forward_pass and load_weights if just using the net **/

void GetStampedName( char *name )
{
  char *outputName = "weights";
  char date[100],weekday[10],month[10],temp[10];
  int  day,hour,min,sec,year;
  FILE *dateFile;

  /* if ( strcmp(BehaviorName,"Dunno") )
       outputName = strdup(BehaviorName); */ /* Main.c does this */
  /* system("date > date.log");   Put the date in a file */
  dateFile = fopen("date.log","r");
  fscanf(dateFile,"%[^\n]",date);   /* scan it in */
  fclose(dateFile);

  sscanf(date,"%s %s %d %d:%d:%d %s %d",
         weekday,month,&day,&hour,&min,&sec,temp,&year);
  sprintf(name,"%s-%s%d-%d:%d.dat",outputName,month,day,hour,min);
}

void test (char *source)
{
  int p,i,o;
  float error;
  char WeightFileName[200];

  if ( !strcmp(TEST_WEIGHTS,"none") )
    GetStampedName(WeightFileName);
  else
    strcpy(WeightFileName,TEST_WEIGHTS);

  w1 = allocate_2dim(NN_n_inp+1,NN_n_hid);
  w2 = allocate_2dim(NN_n_hid+1,NN_n_out);

  if ( LengthSourceFile(source, &n_pat) ) my_error("LengthSourceFile");
  train_in  = allocate_2dim(n_pat, NN_n_inp);
  train_out = allocate_2dim(n_pat, NN_n_out);
  if ( InputSourceFile(source, train_in, train_out) )
    my_error("InputSourceFile");

  NN_initialize_to_use(WeightFileName);

  error = 0;
  epoch_error = 0;
  epoch_error_num = 0;
  for (p=0; p<n_pat; p++) {
    for (i=0;i<NN_n_inp;i++) inp[i] = train_in[p][i]; /* set input vector */
    forward_pass();
    for (o=0;o<NN_n_out;o++) {
      target[o] = train_out[p][o]; /* set target vector */
      error = target[o]-out[o];
      epoch_error += sqr(error);
      if ( fabs(target[o]-out[o]) > .4 )
        epoch_error_num++;
    }
  }
  printf("total error = %7.4f, avg. error = %7.4f, %d errors out of %d.\n",
         epoch_error,
         epoch_error/(n_pat*NN_n_out), epoch_error_num, n_pat);

  for (i=0; i<n_pat; i++){
    delete [] train_in[i];   /* allocated with new[], so use delete[] */
    delete [] train_out[i];
  }
/*  delete(train_in);
    delete(train_out);*/
}

void train (char *source, int max_epochs, int argc, char **argv)
{
  int i,h,o;
  char WeightFileName[200];

  global_max_epochs = max_epochs;
  GetStampedName( WeightFileName );

  if ( LengthSourceFile(source, &n_pat) ) my_error("LengthSourceFile");
  allocate_arrays(n_pat);
  eta1 = ETA1;
  eta2 = ETA2;
  gamma1 = GAMMA1;
  gamma2 = GAMMA2;
  if ( InputSourceFile(source, train_in, train_out) )
    my_error("InputSourceFile");

  if ( strcmp(INITIALIZE_FILE,"none") )
    initialize_from_file(INITIALIZE_FILE);
  else
    initialize();

  static float last_epoch_error = 0;
  do {
    train_one_epoch();
    epoch_counter++;

    /* save best stuff */
    if ( epoch_counter == 1 || epoch_error < best_epoch_error ){
      best_epoch_counter   = epoch_counter;
      best_epoch_error     = epoch_error;
      best_epoch_error_num = epoch_error_num;

      /* copy weights: inputs->hiddens */
      for (i=0; i<NN_n_inp+1; i++) {
        for (h=0; h<NN_n_hid; h++) {
          best_w1[i][h] = w1[i][h];
        }
      }
      /* copy weights: hiddens->outputs */
      for (h=0; h<NN_n_hid+1; h++) {
        for (o=0; o<NN_n_out; o++) {
          best_w2[h][o] = w2[h][o];
        }
      }
    }

    /* Write the best weights seen so far */
    if (epoch_counter % SAVE_WGTS_FREQ == 0) {
      write_weights(WeightFileName);
    }

    if (epoch_counter % DISPLAY_FREQ == 0) {
      printf("Epoch %ld:  tot err = %7.4f, avg err = %7.4f, num wrong = %d\n",
             epoch_counter, epoch_error,
             epoch_error/(n_pat*NN_n_out), epoch_error_num);
      if ( fabs(epoch_error - last_epoch_error) < .001 ) break;
      last_epoch_error = epoch_error;
    }
  } while (epoch_counter < max_epochs && epoch_error >= 0.01);

  printf("\nBackprop quit after epoch %ld with error %7.4f (%d wrong)\n",
         epoch_counter, epoch_error, epoch_error_num);

  FILE *result_compilation = fopen("compilation.dat","a");
  fprintf(result_compilation,"--%s: %d hiddens, eta = (%f,%f), gamma = (%f,%f)\n",
          WeightFileName, NN_n_hid, eta1, eta2, gamma1, gamma2);
  fprintf(result_compilation,"Epoch %ld:  tot err = %7.4f, avg err = %7.4f, num wrong = %d\n\n",
          best_epoch_counter, best_epoch_error,
          best_epoch_error/(n_pat*NN_n_out), best_epoch_error_num);
  fclose(result_compilation);

  for (i=0; i<n_pat; i++){
    delete [] train_in[i];
    delete [] train_out[i];
  }
/*  delete(train_in);
    delete(train_out);*/
}

void initialize()
{
  int i,h,o;
  printf("Initializing %d->%d->%d network:\n", NN_n_inp,NN_n_hid,NN_n_out);
  printf("\teta = (%f,%f), alpha = %f, gamma = (%f,%f), randmax = %f\n",
         eta1,eta2,alpha,gamma1,gamma2,randmax);
  printf("\t%d training patterns\n", n_pat);

  /* initialize bias units */
  inp[NN_n_inp] = 1.0;
  hid[NN_n_hid] = 1.0;

  /* initialize input->hidden weights */
  for (i=0; i<NN_n_inp+1; i++) {
    for (h=0; h<NN_n_hid; h++) {
      w1[i][h] = rnd(-randmax,randmax);
      dw1[i][h] = 0.0;
    }
  }
  /* initialize hidden->output weights */
  for (h=0; h<NN_n_hid+1; h++) {
    for (o=0; o<NN_n_out; o++) {
      w2[h][o] = rnd(-randmax,randmax);
      dw2[h][o] = 0.0;
    }
  }
}

void train_one_epoch ()
{
  int i,h,o,p;

  /* clear all weight deltas */
  for (i=0; i<NN_n_inp+1; i++) for (h=0; h<NN_n_hid; h++) dw1[i][h]=0.0;
  for (h=0; h<NN_n_hid+1; h++) for (o=0; o<NN_n_out; o++) dw2[h][o]=0.0;

  epoch_error = 0.0;
  epoch_error_num = 0;
  for (p=0; p<n_pat; p++) {
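The listing breaks off inside train_one_epoch at the end of page 1; forward_pass, backward_pass, and update_weights appear on page 2 and are not shown here. For reference, the following is a minimal, self-contained sketch (not the original code) of the forward pass a network with the settings above computes: sigmoid hidden units (h_actfn = sigm), linear output units (o_actfn = line), and a bias unit pinned to 1.0 in the input and hidden layers. The sizes and weight values are purely illustrative.

#include <math.h>
#include <stdio.h>

#define N_INP 2   /* illustrative sizes, standing in for NN_n_inp etc. */
#define N_HID 3
#define N_OUT 1

/* Forward pass for a 1-hidden-layer MLP: sigmoid hiddens, linear outputs.
   w1 is (N_INP+1) x N_HID and w2 is (N_HID+1) x N_OUT; the extra row is
   the bias unit, whose activation stays 1.0 as in the listing above. */
void mlp_forward(float w1[N_INP+1][N_HID], float w2[N_HID+1][N_OUT],
                 float inp[N_INP+1], float hid[N_HID+1], float out[N_OUT])
{
  int i, h, o;
  for (h = 0; h < N_HID; h++) {
    float sum = 0.0f;                     /* the listing's hid0[h]: pre-activation */
    for (i = 0; i < N_INP+1; i++)
      sum += inp[i] * w1[i][h];
    hid[h] = 1.0f / (1.0f + expf(-sum));  /* sigm activation */
  }
  hid[N_HID] = 1.0f;                      /* hidden-layer bias unit */
  for (o = 0; o < N_OUT; o++) {
    float sum = 0.0f;                     /* the listing's out0[o]: pre-activation */
    for (h = 0; h < N_HID+1; h++)
      sum += hid[h] * w2[h][o];
    out[o] = sum;                         /* line activation: identity */
  }
}

int main(void)
{
  float w1[N_INP+1][N_HID] = {{0.1f,-0.2f,0.3f},{0.4f,0.1f,-0.3f},{0.0f,0.2f,0.1f}};
  float w2[N_HID+1][N_OUT] = {{0.5f},{-0.4f},{0.2f},{0.1f}};
  float inp[N_INP+1] = {0.7f, -0.3f, 1.0f};  /* last entry is the bias */
  float hid[N_HID+1], out[N_OUT];

  mlp_forward(w1, w2, inp, hid, out);
  printf("out[0] = %f\n", out[0]);
  return 0;
}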

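Likewise, update_weights is only on page 2, but the variables declared above (alpha for momentum, gamma1/gamma2 for weight decay, prev_dw1/prev_dw2 for the previous epoch's changes, dw1/dw2 accumulating per-epoch changes) match the conventional batch update. The sketch below shows that conventional step under those assumptions; update_layer and its parameter names are hypothetical, not the original code.

/* Conventional per-epoch batch update with momentum and weight decay:
   a sketch of the standard rule suggested by the declarations above,
   not a reproduction of the page-2 code. dw holds the weight change
   accumulated over the epoch. */
void update_layer(float **w, float **dw, float **prev_dw,
                  int rows, int cols, float eta, float alpha, float gamma)
{
  for (int r = 0; r < rows; r++) {
    for (int c = 0; c < cols; c++) {
      float step = eta * dw[r][c]          /* accumulated gradient step */
                 + alpha * prev_dw[r][c]   /* momentum: reuse last change */
                 - gamma * w[r][c];        /* weight decay toward zero */
      w[r][c] += step;
      prev_dw[r][c] = step;                /* remembered for next epoch */
    }
  }
}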