
📄 bp.cpp

📁 The BP algorithm is a classic artificial neural network algorithm. This is a Visual C++ version of the BP algorithm.

#include "bp.h"
void   InitW(void);
void   GetInputs(int);
void   Compute(void);
double Sigmoid(double Net);
void   LoadSet(char *Fname,int state);
double CompErrors2(int lyr,int j,double dNETj);
void   CompErrors(void);
void   AdaptW(void);
void   SaveW(char *);
void   DispResults();
//----------------------------------------------------
void LoadSet(char *Fname,int state) {
FILE *PFILE;
int PGindx;
int x,mask;
int pat,i,j;
double inVal;
int NumPatBytes;   

PFILE = fopen(Fname,"r");    
if (PFILE==NULL){
   printf("\nUnable to open file \n");
   exit(0);
   }
fscanf(PFILE,"%d",&NumPatterns);
NumPatBytes= LayerSize[0] / 8;   // assumes the input layer size is a multiple of 8
for (pat=0; pat<NumPatterns; pat++) {
   PGindx=0;
   for (i=0; i<NumPatBytes; i++) {
      fscanf(PFILE,"%x",&x);
      mask = 0x80;
      for (j=0; j<8; j++) {
         if ((mask & x) > 0) {
            InPattern[pat][PGindx]=1.0;
            }
          else {
            InPattern[pat][PGindx]=0.0;
            } /* endif */
         mask=mask/2; //printf("{%x}",mask);
         PGindx++;
      	  } /* endfor */
      } /* endfor */
   // Now get desired / expected values
   if(state==TRAIN){                           
     for (i=0; i<LayerSize[OutLayerIndx]; i++) {
         fscanf(PFILE,"%lf",&inVal);
         Desired[pat][i]=inVal;
	     } /* endfor */
    }//endif
   } /* endfor */
fclose(PFILE);
}
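/* Illustrative layout of the pattern file LoadSet() parses (values and sizes
   here are hypothetical, assuming LayerSize[0]=16 and a single output
   neuron). The first integer is NumPatterns; each pattern then supplies
   LayerSize[0]/8 hex bytes, expanded MSB-first into 0.0/1.0 inputs, followed
   in TRAIN mode by LayerSize[OutLayerIndx] desired output values:

      2           <- NumPatterns
      3C 42       <- pattern 0: two hex bytes -> 16 binary inputs
      1.0         <- desired output for pattern 0 (TRAIN only)
      FF 00       <- pattern 1
      0.0         <- desired output for pattern 1
*/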


//----------------------------------------------------------
void LoadParms(char *PrmFileName){
FILE *PRMFILE;
int i;
   PRMFILE = fopen(PrmFileName,"r");    // batch
   if (PRMFILE==NULL){
      printf("\nUnable to open Parameter file: %s \n",PrmFileName);
          exit(0);
      }
   fscanf(PRMFILE,"%lf",&ETA);
   fscanf(PRMFILE,"%lf",&ALPHA);
   fscanf(PRMFILE,"%ld",&MAXITER);
   fscanf(PRMFILE,"%lf",&ERRTOL);
   fscanf(PRMFILE,"%d",&NumLayers);
   printf("\nNumber of layers=%d\n",NumLayers);
   for (i=0; i<NumLayers; i++) {
      fscanf(PRMFILE,"%d",&LayerSize[i]);
      printf("Number of neurons in layer %d = %d\n",i,LayerSize[i]);
      }
   OutLayerIndx = NumLayers-1;         // accommodate 0-origin arrays
   fclose(PRMFILE);
   printf("PAUSING,PRESS ANY KEY TO CONTINUE\n");
   while (!kbhit());
  
   //} /* endif */
}
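/* Illustrative layout of the parameter file LoadParms() reads (the values
   here are hypothetical), in the exact order of the fscanf calls above:

      0.5         <- ETA   (learning rate)
      0.9         <- ALPHA (momentum coefficient)
      100000      <- MAXITER
      0.1         <- ERRTOL
      3           <- NumLayers
      16 8 4      <- LayerSize[0..NumLayers-1]
*/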

//---------------------------------------------------------------
void InitW(){
int i,j,k;
double zWT;
   //randomize();
   srand(6);   // fixed seed, so every run starts from the same weights
   printf("Initial Randomized weights.....");
   for (i=1; i<NumLayers; i++) {
      for (k=0; k<LayerSize[i]; k++) {
         for (j=0; j<=LayerSize[i-1]; j++) {   //  One extra for bias neuron
            zWT=(double)rand();
            zWT=zWT/2.0;
            W[i][j][k] =zWT/65536.0;  // random weight in roughly [0, 0.25] when RAND_MAX is 32767
            Wprev[i][j][k]=0;
            //printf("W(%d,%d,%d)=%lf ",i,j,k,W[i][j][k]);
            if (PrinterEcho)
              fprintf(FPRN,"W(%d,%d,%d)=%lf\n",i,j,k,W[i][j][k]);
            }
        
         if (PrinterEcho) fprintf(FPRN,"\n");
         }
      }
   printf("\n");
}

void GetInputs(int state){   // modified; 'state' is currently unused
int i;
CurrPat++; // Update the current pattern
   CurrIter++;                            
   if (CurrPat>=NumPatterns){
     CurrPat=0;
     Epoch++;
      }//endif
for (i=0; i<LayerSize[0]; i++) {
    Neuron[0][i]=InPattern[CurrPat][i];   // Show it to the neurons
    }
}

//-----------------------------------------------------------
void Compute(){
int lyr;     // layer to calculate
int dNeuron; // dest layer neuron
int sNeuron; // src layer neuron
double SumNet;
//double out;

if (PrinterEcho && DebugOn){ fprintf(FPRN,"Iteration=%ld\n",CurrIter);}
for (lyr=1; lyr<NumLayers; lyr++) {
   Neuron[lyr-1][LayerSize[lyr-1]]=1.0; //force bias neuron output to 1.0
   for (dNeuron=0; dNeuron<LayerSize[lyr]; dNeuron++) {
      SumNet=0.0;
      for (sNeuron=0; sNeuron <= LayerSize[lyr-1]; sNeuron++) {  //add 1 for bias
         SumNet += Neuron[lyr-1][sNeuron] * W[lyr][sNeuron][dNeuron];
         if (PrinterEcho && DebugOn){
            fprintf(FPRN,"->Neuron[%d][%d]=%lf \n",
                       lyr-1, sNeuron,Neuron[lyr-1][sNeuron]);
            fprintf(FPRN,"->W[%d][%d][%d]=%lf \n",
                       lyr, sNeuron,dNeuron,W[lyr][sNeuron][dNeuron]);
            }//ENDIF
         }//ENDFOR
      Neuron[lyr][dNeuron] = Sigmoid(SumNet/*,Temperature*/);
      if (PrinterEcho /*&& DebugOn*/ && lyr==OutLayerIndx){
          fprintf(FPRN,"Neuron[%d][%d]=%lf   \n",
                                 lyr, dNeuron,Neuron[lyr][dNeuron]);
          }//ENDIF
      }//ENDFOR
   }//ENDFOR
if (PrinterEcho && DebugOn){ fprintf(FPRN,"\n");}
//return out;
}
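/* The forward pass Compute() implements, written out: for each layer lyr>=1,

      net_j = SUM over i of Neuron[lyr-1][i] * W[lyr][i][j]

   where i runs 0..LayerSize[lyr-1] inclusive, the last i being the bias
   neuron pinned to 1.0, and then

      Neuron[lyr][j] = Sigmoid(net_j) = 1 / (1 + exp(-net_j))
*/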

//-----------------------------------------------------------
double CompErrors2(int lyr,int j,double dNETj){
int k;
double Delta, SUMk;
SUMk=0.0;
for (k=0; k<LayerSize[lyr+1]; k++) {
   SUMk += DELTAj[lyr+1][k] * W[lyr+1][j][k];
   } /* endfor */
Delta = dNETj * SUMk;
return Delta;
}
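/* CompErrors2() is the standard hidden-layer delta of backpropagation:

      delta_j(lyr) = f'(net_j) * SUM over k of delta_k(lyr+1) * W[lyr+1][j][k]

   The caller passes f'(net_j) in as dNETj, already evaluated as
   out_j * (1 - out_j) for the sigmoid activation. */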

void CompErrors(){
//double dNETj;
//double MOMENTUM;
int/*  i,*/ j;
int LocalConvFlg=TRUE;
if (PrinterEcho)
  fprintf(FPRN,"\nEpoch=%ld CompErrors Pattern#=%d Iteration=%ld  ",Epoch,CurrPat,CurrIter);

for (j=0; j<LayerSize[OutLayerIndx]; j++) {
   ERROR[j] = (Desired[CurrPat][j] - Neuron[OutLayerIndx][j]);
   if (fabs(ERROR[j])>=ERRTOL) LocalConvFlg=FALSE;  // any out-of-tolerance error vetoes convergence
   AvgErr +=fabs(ERROR[j]);
   if (fabs(ERROR[j])  > WorstErr) WorstErr= fabs(ERROR[j]);
   if (CurrPat==(NumPatterns-1)) {
      AvgErr=AvgErr/NumPatterns;
      //printf("Epoch=%ld Eavg=%lf Eworst=%lf\n",Epoch,AvgErr, fabs(WorstErr) );
      if (ArchOn==AVERAGE) {
         if ((ARCGRAN*(Epoch/ARCGRAN)) == Epoch) { // only save every ARCGRANth epoch
            fprintf(ARCHIVE,"%ld %lf %lf\n",Epoch,AvgErr, fabs(WorstErr) );
            } /* endif */
         }
      if (ArchOn==WORST) {
         if ((ARCGRAN*(Epoch/ARCGRAN)) == Epoch) { // only save every ARCGRANth epoch
            fprintf(ARCHIVE,"%ld %lf\n",Epoch, fabs(WorstErr) );
            } /* endif */
         }//endif
      AvgErr=0.0;
      WorstErr=0.0;
      } /* endif */
   if (PrinterEcho) fprintf(FPRN,"ERROR[%d]=%lf\n  ",j,ERROR[j]);
   if (ArchOn==ALL) fprintf(ARCHIVE,"%ld %lf\n",CurrIter,fabs(ERROR[j]) );
   } /* endfor */

if (LocalConvFlg) {
   ConvCount++;      //Record that another consec pattern is within ERRTOL
   if (ConvCount==NumPatterns) {
      ConvergeFlg=TRUE;
      } /* endif */
   }
  else {
   ConvCount=0; //Start over. This pattern had an error out of tolerance
   } /* endif */
}
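/* Convergence test used above: ConvCount counts consecutive patterns whose
   every output error is within ERRTOL; once NumPatterns patterns in a row
   pass, ConvergeFlg is set and the training loop in main() stops. */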



//---------------------------- TODO: investigate when the weights should be updated!

void AdaptW(){
double dNETj;
double MOMENTUM;
int lyr, i, j;
unsigned int cmd;
unsigned int esf;                       // epoch scale factor

esf = NumPatterns *1000;
cmd = (unsigned int)(CurrIter/esf);    // cmd increments every 1,000 epochs
if ( (esf*(CurrIter/esf)) ==CurrIter ) {   // fires only when CurrIter is an exact multiple of esf
      switch (cmd) {
         case 1:
            ETA += 3.00;      // bump up learning rate at epoch 1,000
            printf("New ETA =%lf\n",ETA);
            break;            // was 2.0
         case 2:
            ETA += 5.00;      // bump up learning rate at epoch 2,000
            printf("New ETA =%lf\n",ETA);
            break;            // was 4.5
         case 3:
            ETA += 8.00;      // bump up learning rate at epoch 3,000
            printf("New ETA =%lf\n",ETA);
            break;            // was 7.0
         case 4:
            ETA += 8.00;      // bump up learning rate at epoch 4,000
            printf("New ETA =%lf\n",ETA);
            break;            // was 8.0
         case 5:
            ETA += 8.00;      // bump up learning rate at epoch 5,000
            printf("New ETA =%lf\n",ETA);
            break;
         default:;
      } 
   } 

// APPLY BACKPROP
 
for (lyr=OutLayerIndx; lyr>0; lyr--) {
   for (j=0; j<LayerSize[lyr]; j++) {
      dNETj=Neuron[lyr][j] * (1 - Neuron[lyr][j]);
      if (lyr==OutLayerIndx) {
         ERROR[j] = (Desired[CurrPat][j] - Neuron[lyr][j]);
         DELTAj[lyr][j] = ERROR[j] * dNETj;
         }
       else {
         DELTAj[lyr][j] = CompErrors2(lyr,j,dNETj);
         }
      for (i=0; i<=LayerSize[lyr-1]; i++) {   // include bias
         DELTA_Wij[lyr][i][j] = ETA * DELTAj[lyr][j] * Neuron[lyr-1][i];
         MOMENTUM= ALPHA*(W[lyr][i][j] - Wprev[lyr][i][j]);
         Wprev[lyr][i][j]=W[lyr][i][j];
         W[lyr][i][j] = W[lyr][i][j]+ DELTA_Wij[lyr][i][j] + MOMENTUM;
         } /* endfor */
      } /* endfor */
   } /* endfor */

}
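/* The update AdaptW() applies is the delta rule with classical momentum:

      W[lyr][i][j] += ETA * DELTAj[lyr][j] * Neuron[lyr-1][i]
                      + ALPHA * (W[lyr][i][j] - Wprev[lyr][i][j])

   Because Wprev always holds the weight from the previous step, the ALPHA
   term equals ALPHA times the previous total update. */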


//------------------------------------------------------
void   SaveW(char *WgtName) {
int lyr,s,d;
double zWT;
FILE *WEIGHTFILE;

   WEIGHTFILE = fopen(WgtName,"w");
   if (WEIGHTFILE==NULL){
      printf("Unable to open weight file for output:%s\n",WgtName);
      exit(0);
      }
   printf("SAVING CALCULATED WEIGHTS......\n");
   fprintf(WEIGHTFILE,"0.00\n");                  // Threshold always 0
   fprintf(WEIGHTFILE,"%d\n",NumLayers);         // Number of layers
   for (lyr=0; lyr<NumLayers; lyr++) {            // Save topology
      fprintf(WEIGHTFILE,"%d\n",LayerSize[lyr]); // Number of neurons/layer
      }
   for (lyr=1; lyr<NumLayers; lyr++) {         // Start at 1st hidden
      for (d=0; d<LayerSize[lyr]; d++) {
         for (s=0; s<=LayerSize[lyr-1]; s++) { // One extra for bias 
            zWT=W[lyr][s][d];
            fprintf(WEIGHTFILE,"%.25lf\n",zWT);
            if (PrinterEcho)
              fprintf(FPRN,"\nW(%d,%d,%d)=%lf",lyr,s,d,W[lyr][s][d]);
            }
         if (PrinterEcho) fprintf(FPRN,"\n");
         }
      }
   fclose(WEIGHTFILE);
}
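/* Layout of the weight file SaveW() writes: a fixed "0.00" threshold line,
   NumLayers, one LayerSize per layer, then every weight (bias weights
   included) at 25-digit precision, in the layer -> destination -> source
   nesting order of the loops above. */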

//--------------------------------------------------------------
void DispResults(){
printf("\n---------------------------------------\n");
if (ConvergeFlg) {
   printf("SUCCESS: Convergance has occured at iteration %ld\n",CurrIter);
   }
  else {
   printf("FAILURE: Convergance has NOT occured!!\n");
   } // endif
printf("ETA  = %lf\n",ETA);
printf("Alpha= %lf\n",ALPHA);
printf("\n----------------------------------------\n");
}

double Sigmoid(double Net){
return 1.0/(1.0 + exp(-Net));
}
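/* Note on Sigmoid(): its derivative is s(x) * (1 - s(x)), which is why
   AdaptW() can compute dNETj as Neuron[lyr][j] * (1 - Neuron[lyr][j])
   directly from the stored outputs, without keeping the net values around. */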


int main(int argc, char *argv[])
{
if (PrinterEcho) FPRN=fopen("prnfile","w");
if (ArchOn) ARCHIVE=fopen("archive","w");
if (argc>3) {
   LoadParms(argv[2]);
   LoadSet(argv[1],TRAIN);
   }
 else {
   printf("USAGE: BP TRAINING_FILE PARMS_FILE WEIGHT_FILE TEST_FILE\n");
   exit(0);
   }
InitW();
CurrIter=0;
//double tmp;
while ((!ConvergeFlg) && (CurrIter<MAXITER)) {
   GetInputs(TRAIN);
   Compute();
   CompErrors();
   AdaptW();
   }
DispResults();   // Show how we did
if (ConvergeFlg)
   SaveW(argv[3]);   // Save the weights for later use
if (PrinterEcho) fclose(FPRN);
if (ArchOn) fclose(ARCHIVE);

// Now test the network.
// The first 9 patterns are the training patterns;
// the last 9 are the altered patterns.

int i,j;
CurrPat = -1;   // so the first GetInputs() call starts at pattern 0
printf("now test the bp ann network\n");
if (argc>4 && ConvergeFlg){
	LoadSet(argv[4],TEST);
	for(i=0;i<NumPatterns;i++){
		GetInputs(TEST);
        Compute();
		int temp=0;   // decode the output layer as a binary number
		for(j=0;j<LayerSize[OutLayerIndx];j++){
			printf("%f\n",Neuron[OutLayerIndx][j]);
			if(fabs(Neuron[OutLayerIndx][j]-1)<ERRTOL)
				temp=temp*2+1;               // output near 1 -> bit 1
			else if(fabs(Neuron[OutLayerIndx][j])<ERRTOL)
				temp=temp*2;                 // output near 0 -> bit 0
			else temp=-1;                    // ambiguous -> unrecognized
		}
		if(temp>=0)
			printf("this pattern is %d \n",temp);
		 else printf("don't know this pattern\n");
	}//endfor
}//endif
return 0;
}
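A typical invocation, matching the USAGE string printed in main() (the file
names here are hypothetical):

    bp digits.trn bp.prm digits.wgt digits.tst

Training runs until either every pattern's output error falls within ERRTOL
(convergence) or MAXITER iterations elapse; the weights are saved and the
test file evaluated only if the network converged.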
