⭐ 虫虫下载站

📄 layernet.c~

📁 Statistical pattern recognition algorithm package
💻 C++
/*
--------------------------------------------------------------------------------
   trial - Compute the output for a given input by evaluating network
--------------------------------------------------------------------------------
*/

void LayerNet::trial ( double *input )
{
   int i ;

   if (! exe) {   // Should NEVER happen, but good style to aid debugging
      error_message ( "Internal error in LayerNet::trial" ) ;
      return ;
      }

   if (nhid1 == 0) {                // No hidden layer
      for (i=0 ; i<nout ; i++)
         out[i] = activity ( input , out_coefs+i*(nin+1) , nin ) ;
      }

   else if (nhid2 == 0) {           // One hidden layer
      for (i=0 ; i<nhid1 ; i++)
         hid1[i] = activity ( input , hid1_coefs+i*(nin+1) , nin ) ;
      for (i=0 ; i<nout ; i++)
         out[i] = activity ( hid1 , out_coefs+i*(nhid1+1) , nhid1 ) ;
      }

   else {                           // Two hidden layers
      for (i=0 ; i<nhid1 ; i++)
         hid1[i] = activity ( input , hid1_coefs+i*(nin+1) , nin ) ;
      for (i=0 ; i<nhid2 ; i++)
         hid2[i] = activity ( hid1 , hid2_coefs+i*(nhid1+1) , nhid1 ) ;
      for (i=0 ; i<nout ; i++)
         out[i] = activity ( hid2 , out_coefs+i*(nhid2+1) , nhid2 ) ;
      }
}

/*
--------------------------------------------------------------------------------
   learn
--------------------------------------------------------------------------------
*/

void LayerNet::learn ( TrainingSet *tptr , struct LearnParams *lptr )
{
   int i, itry, n_escape, n_retry, bad_count ;   // "try" renamed itry: try is a C++ keyword
   double err, prev_err, best_err, start_of_loop_error ;
   char msg[80] ;
   SingularValueDecomp *sptr ;
   LayerNet *worknet, *bestnet ;

   if (! exe) {   // Should NEVER happen, but good style to aid debugging
      error_message ( "Internal error in LayerNet::learn" ) ;
      return ;
      }

   n_escape = n_retry = 0 ;

/*
   Allocate scratch memory
*/

   MEMTEXT ( "LAYERNET::learn new worknet, bestnet" ) ;
   worknet = new LayerNet ( outmod , nin , nhid1 , nhid2 , nout , 0 , 0 ) ;
   bestnet = new LayerNet ( outmod , nin , nhid1 , nhid2 , nout , 0 , 1 ) ;

   if ((worknet == NULL)  ||  (! worknet->ok)
    || (bestnet == NULL)  ||  (! bestnet->ok)) {
      memory_message ( "to learn" ) ;
      if (worknet != NULL)
         delete worknet ;
      if (bestnet != NULL)
         delete bestnet ;
      neterr = 1.0 ;
      return ;
      }

/*
   Find initial weight estimates via simulated annealing or genetics
   or simply regression if no hidden layers
*/

   if (lptr->init == 0)                             // NO INIT
      neterr = trial_error ( tptr ) ;

   if ((lptr->init == 1)  ||  (lptr->init == 2))    // ANNEAL
      anneal ( tptr , lptr , worknet , 1 ) ;

   else if (lptr->init == 3) {                      // GENETIC
      if (nhid1)
         gen_init ( tptr , lptr ) ;
      else {
         error_message ( "Genetic init pointless with no hidden layer" ) ;
         neterr = 1.0 ;
         goto FINISH ;
         }
      }

   else if (lptr->init == 4) {                      // REGRESSION
      if (nhid1) {
         warning_message ( "REGRESS illegal if hidden layer." ) ;
         neterr = 1.0 ;
         goto FINISH ;
         }
      MEMTEXT ( "LAYERNET: new SingularValueDecomp" ) ;
      sptr = new SingularValueDecomp ( tptr->ntrain , nin+1 , 1 ) ;
      if (! sptr->ok) {    // Indicates insufficient memory
         memory_message ( "for regression. Try ANNEAL NOREGRESS." ) ;
         neterr = 1.0 ;    // Flag failure to caller
         delete sptr ;
         goto FINISH ;     // Must not fall through to regress with sptr deleted
         }
      neterr = regress ( tptr , sptr ) ;
      MEMTEXT ( "LAYERNET: delete SingularValueDecomp" ) ;
      delete sptr ;
      }

   if (lptr->init  &&  neterr > 0.999999) // Memory allocation failure
      goto FINISH ;

/*
   Initialization is done.  Learning loop is here.
   First, do conjugate gradient optimization, finding local minimum.
   Then anneal to break out of it.  If successful, loop back up to
   do conjugate gradient again.  Otherwise restart totally random.
*/

   copy_weights ( bestnet , this ) ;
   best_err = neterr ;
   bad_count = 0 ;           // Handles flat local mins

   for (itry=1 ; ; itry++) {

      sprintf ( msg , "Try %d  (best=%lf):", itry, 100.0 * best_err ) ;
      normal_message ( msg ) ;

      start_of_loop_error = neterr ;
      err = conjgrad ( tptr , 1000 , 1.e-8 , lptr->quit_err ) ;
      neterr = fabs ( err ) ; // err<0 if user pressed ESCape

      sprintf ( msg , "  Gradient err=%lf", 100.0 * neterr ) ;
      progress_message ( msg ) ;

      if (neterr < best_err) {   // Keep track of best
         copy_weights ( bestnet , this ) ;
         best_err = neterr ;
         }

      if (err < lptr->quit_err) // err<0 if user pressed ESCape
         break ;

      i = itry * 97 + 101 ;   // Ensure new seed for anneal
      if (i < 0)
         i = -i ;
      slongrand ( (long) i ) ;

      prev_err = neterr ;  // So we can see if anneal helped
      anneal ( tptr , lptr , worknet , 0 ) ;

      sprintf ( msg , "  Anneal err=%lf", 100.0 * neterr ) ;
      progress_message ( msg ) ;

      if (neterr < best_err) {  // Keep track of best
         copy_weights ( bestnet , this ) ;
         best_err = neterr ;
         }

      if (best_err < lptr->quit_err)
         break ;

      if (neterr < prev_err) { // Did we break out of local min?
         if ((start_of_loop_error - neterr) < 1.e-3)
            ++bad_count ;  // Avoid many unprofitable iters
         else
            bad_count = 0 ;
         if (bad_count < 4) {
            ++n_escape ;          // For user interest only
            continue ;            // Escaped, so gradient learn again
            }
         }

      if (++n_retry > lptr->retries)
         break ;

      progress_message ( "  RESTART" ) ;
      zero_weights () ;  // Failed to break out, so retry random
      anneal ( tptr , lptr , worknet , 1 ) ;
      }

FINISH:
   copy_weights ( this , bestnet ) ;
   MEMTEXT ( "LAYERNET::learn delete worknet, bestnet" ) ;
   delete worknet ;
   delete bestnet ;

   sprintf ( msg , "%d successful escapes, %d retries", n_escape, n_retry ) ;
   normal_message ( msg ) ;

   return ;
}

/*
--------------------------------------------------------------------------------
   wt_save - Save weights to disk (called from WT_SAVE.CPP)
   wt_restore - Restore weights from disk (called from WT_SAVE.CPP)
--------------------------------------------------------------------------------
*/

int LayerNet::wt_save ( FILE *fp )
{
   int n ;

   if (nhid1 == 0) {                // No hidden layer
      n = nout * (nin+1) ;
      fwrite ( out_coefs , n * sizeof(double) , 1 , fp ) ;
      }
   else if (nhid2 == 0) {           // One hidden layer
      n = nhid1 * (nin+1) ;
      fwrite ( hid1_coefs , n * sizeof(double) , 1 , fp ) ;
      n = nout * (nhid1+1) ;
      fwrite ( out_coefs , n * sizeof(double) , 1 , fp ) ;
      }
   else {                           // Two hidden layers
      n = nhid1 * (nin+1) ;
      fwrite ( hid1_coefs , n * sizeof(double) , 1 , fp ) ;
      n = nhid2 * (nhid1+1) ;
      fwrite ( hid2_coefs , n * sizeof(double) , 1 , fp ) ;
      n = nout * (nhid2+1) ;
      fwrite ( out_coefs , n * sizeof(double) , 1 , fp ) ;
      }

   if (ferror ( fp ))
      return 1 ;
   return 0 ;
}

void LayerNet::wt_restore ( FILE *fp )
{
   int n ;

   if (nhid1 == 0) {                // No hidden layer
      n = nout * (nin+1) ;
      fread ( out_coefs , n * sizeof(double) , 1 , fp ) ;
      }
   else if (nhid2 == 0) {           // One hidden layer
      n = nhid1 * (nin+1) ;
      fread ( hid1_coefs , n * sizeof(double) , 1 , fp ) ;
      n = nout * (nhid1+1) ;
      fread ( out_coefs , n * sizeof(double) , 1 , fp ) ;
      }
   else {                           // Two hidden layers
      n = nhid1 * (nin+1) ;
      fread ( hid1_coefs , n * sizeof(double) , 1 , fp ) ;
      n = nhid2 * (nhid1+1) ;
      fread ( hid2_coefs , n * sizeof(double) , 1 , fp ) ;
      n = nout * (nhid2+1) ;
      fread ( out_coefs , n * sizeof(double) , 1 , fp ) ;
      }

   if (ferror ( fp ))
      ok = 0 ;
}
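The sketch below shows one plausible way a caller might drive the routines in this listing: build a network, train it with learn, run a forward pass with trial, and save the weights with wt_save. It is not part of layernet.c~; it only uses calls and members that appear above, and the header name, the OUTMOD_CLASSIFY constant, the concrete layer sizes, and the meaning of the two trailing constructor flags are assumptions.

#include <stdio.h>
#include "layernet.h"   // assumed header declaring LayerNet, TrainingSet, LearnParams

void train_and_save ( TrainingSet *tset )
{
   struct LearnParams params ;
   params.init     = 1 ;       // 1 or 2 selects ANNEAL initialization in learn()
   params.quit_err = 0.001 ;   // learn() stops once the error falls below this
   params.retries  = 5 ;       // random restarts allowed before learn() gives up

   // Argument order mirrors the worknet/bestnet allocations inside learn():
   // output model, nin, nhid1, nhid2, nout, then two flags not documented here.
   LayerNet *net = new LayerNet ( OUTMOD_CLASSIFY , 10 , 4 , 0 , 2 , 1 , 1 ) ;
   if ((net == NULL)  ||  (! net->ok))
      return ;

   net->learn ( tset , &params ) ;   // hybrid conjugate-gradient / annealing loop

   double sample[10] = { 0.0 } ;
   net->trial ( sample ) ;           // forward pass; outputs are left in net->out[]

   FILE *fp = fopen ( "weights.bin" , "wb" ) ;
   if (fp != NULL) {
      if (net->wt_save ( fp ))       // nonzero return signals a write error
         printf ( "Error writing weights file\n" ) ;
      fclose ( fp ) ;
      }

   delete net ;
}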
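A note on the weight files handled above: wt_save writes nothing but the raw coefficient arrays, in network order, with no header or dimension record. For a one-hidden-layer network the file therefore holds nhid1*(nin+1) + nout*(nhid1+1) doubles; with nin=10, nhid1=4 and nout=2 that is 4*11 + 2*5 = 54 doubles, or 432 bytes with 8-byte doubles. wt_restore simply reads the same arrays back, so a file can only be restored into a network created with exactly the same nin/nhid1/nhid2/nout geometry.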
