⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 basicbpn.c

📁 BP神经网络程序
💻 C
📖 第 1 页 / 共 2 页
字号:
/******************************************************************************

                      ====================================================
        Network:      Backpropagation Network with Bias Terms and Momentum
                      ====================================================

        Application:  Time-Series Forecasting
                      Prediction of the Annual Number of Sunspots

 ******************************************************************************/




/******************************************************************************
                            D E C L A R A T I O N S
 ******************************************************************************/

#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <conio.h>

/* Pre-C99 convenience types: int-backed boolean and project-wide aliases. */
typedef int           BOOL;
typedef int           INT;
typedef double        REAL;

#define FALSE         0
#define TRUE          1
/* Readability aliases for the logical operators. */
#define NOT           !
#define AND           &&
#define OR            ||

/* Extreme sentinels used to seed the min/max scans in NormalizeData(). */
#define MIN_REAL      -HUGE_VAL
#define MAX_REAL      +HUGE_VAL
/* NOTE: classic double-evaluation macros - do not pass expressions with
   side effects (e.g. MIN(i++, j)). */
#define MIN(x,y)      ((x)<(y) ? (x) : (y))
#define MAX(x,y)      ((x)>(y) ? (x) : (y))

/* All data is linearly rescaled into [LO, HI] before training. */
#define LO            0.1
#define HI            0.9
/* Constant output of every layer's bias unit (slot 0 of Output[]). */
#define BIAS          1

#define sqr(x)        ((x)*(x))


/* Per-unit arrays are allocated with Units+1 slots; index 0 is the bias
   unit whose output is pinned to BIAS (see GenerateNetwork). */
typedef struct {                     /* A LAYER OF A NET:                     */
        INT           Units;         /* - number of units in this layer       */
        REAL*         Output;        /* - output of ith unit (0 = bias unit)  */
        REAL*         Error;         /* - error term of ith unit              */
        REAL**        Weight;        /* - connection weights to ith unit      */
        REAL**        WeightSave;    /* - saved weights for stopped training  */
        REAL**        dWeight;       /* - last weight deltas for momentum     */
} LAYER;

typedef struct {                     /* A NET:                                */
        LAYER**       Layer;         /* - layers of this net, 0..NUM_LAYERS-1 */
        LAYER*        InputLayer;    /* - input layer (alias of Layer[0])     */
        LAYER*        OutputLayer;   /* - output layer (alias of last layer)  */
        REAL          Alpha;         /* - momentum factor                     */
        REAL          Eta;           /* - learning rate                       */
        REAL          Gain;          /* - gain of sigmoid function            */
        REAL          Error;         /* - total net error                     */
} NET;


/******************************************************************************
        R A N D O M S   D R A W N   F R O M   D I S T R I B U T I O N S
 ******************************************************************************/


void InitializeRandoms()
{
  /* Seed the C PRNG with a fixed constant so every run draws the same
     weight initialization - makes training results reproducible. */
  srand(4711);
}


/* Uniformly distributed random integer in [Low, High] (modulo method). */
INT RandomEqualINT(INT Low, INT High)
{
  INT span = High - Low + 1;
  return Low + rand() % span;
}


/* Uniformly distributed random real in [Low, High]: scale the unit
   sample rand()/RAND_MAX onto the requested interval. */
REAL RandomEqualREAL(REAL Low, REAL High)
{
  REAL unit = (REAL) rand() / RAND_MAX;
  return Low + unit * (High - Low);
}


/******************************************************************************
               A P P L I C A T I O N - S P E C I F I C   C O D E
 ******************************************************************************/


#define NUM_LAYERS    3              /* input, hidden, and output layer */
INT                   N=30, Hidden=20, M=1;        /* layer sizes; overwritten from the data file header in ReadData() */
INT                   Units[NUM_LAYERS]={30,20,1}; /* per-layer unit counts, kept in sync with N/Hidden/M */

/* Sunspot-series constants (not referenced in this part of the file). */
#define FIRST_YEAR    1700
#define LAST_YEAR     1979
#define NUM_YEARS     280

/* Presumably the training stop threshold and epoch cap - used outside
   this excerpt; TODO confirm against the training loop. */
#define TARGET_ERROR  0.0008
#define MAXEPOCHS     400

#define TrainSetRate  0.75           /* fraction of the data sets used for training */


REAL                  TrainError;    /* error on the training set (assigned outside this excerpt) */
REAL                  TestError;     /* error on the test set (assigned outside this excerpt) */

INT                   DataAmount;    /* values read from the data file, incl. the 3-int header */
INT                   DataSetCount;  /* number of complete (input,target) sets in the file */

INT                   TrainSetCount; /* TrainSetRate of DataSetCount, rounded down */
INT                   TestSetCount;  /* remaining sets, used for testing */
REAL**                TrainInput;    /* [1..TrainSetCount][1..N] network inputs */
REAL**                TrainTarget;   /* [1..TrainSetCount][1..M] desired outputs */
REAL**                TestInput;     /* [1..TestSetCount][1..N] */
REAL**                TestTarget;    /* [1..TestSetCount][1..M] */
REAL*                 Max;           /* per-column maxima, slots 1..N+M (see NormalizeData) */
REAL*                 Min;           /* per-column minima, slots 1..N+M */

FILE*                 f;             /* result file opened in ReadData, closed in FinalizeApplication */

/* Read the data file named on stdin, size the net from its 3-int header,
   allocate the 1-based train/test arrays, fill them, and open the result
   file "BasicBPN_<name>".  Returns TRUE on success, FALSE on any error. */
BOOL ReadData()
{
   FILE* Dataf;
   char  DataFileName[20];
   char  SaveFileName[30]={"BasicBPN_"};
   INT   p,q;
   REAL  data;

   printf("Please input the datafile name (for example sunspot.txt) and press ENTER key \n: ");
   /* %19s bounds the read to the buffer (19 chars + NUL) - prevents overflow. */
   if (scanf("%19s", DataFileName) != 1) {
       printf("Cannot read the datafile name\n");
       return FALSE;
   }

   if ((Dataf = fopen(DataFileName, "r")) == NULL) {
       printf("Cannot open %s\n", DataFileName);
       return FALSE;
   }

   /* First pass: the header holds the three layer sizes, followed by
      DataSetCount*(N+M) data values.  Testing the fscanf return value
      (instead of feof) avoids the extra iteration at end of file that
      previously overcounted DataAmount by one. */
   if (fscanf(Dataf, "%d", &N)      != 1 ||
       fscanf(Dataf, "%d", &Hidden) != 1 ||
       fscanf(Dataf, "%d", &M)      != 1) {
       printf("Your datafile format is wrong,please check it.");
       fclose(Dataf);
       return FALSE;
   }
   Units[0] = N; Units[1] = Hidden; Units[2] = M;

   DataAmount = 3;                       /* header counts toward DataAmount */
   while (fscanf(Dataf, "%lf", &data) == 1) {
       DataAmount++;
   }

   if ((DataAmount-3) % (N+M) != 0) {
       printf("Your datafile format is wrong,please check it.");
       fclose(Dataf);
       return FALSE;
   }
   DataSetCount  = (DataAmount-3)/(N+M);
   TrainSetCount = (INT) floor(DataSetCount*TrainSetRate);
   TestSetCount  = DataSetCount-TrainSetCount;

   /* All arrays are 1-based; slot 0 is allocated but unused. */
   TrainInput    = (REAL**)  calloc(TrainSetCount+1, sizeof(REAL*));
   TrainTarget   = (REAL**)  calloc(TrainSetCount+1, sizeof(REAL*));
   TestInput     = (REAL**)  calloc(TestSetCount+1, sizeof(REAL*));
   TestTarget    = (REAL**)  calloc(TestSetCount+1, sizeof(REAL*));
   for (p=1; p<=TrainSetCount; p++){
       TrainInput[p]   = (REAL*)  calloc(N+1, sizeof(REAL));
       TrainTarget[p]  = (REAL*)  calloc(M+1, sizeof(REAL));
   }
   for (p=1; p<=TestSetCount; p++){
       TestInput[p]    = (REAL*)  calloc(N+1, sizeof(REAL));
       TestTarget[p]   = (REAL*)  calloc(M+1, sizeof(REAL));
   }

   /* Second pass: skip the header again and read every set exactly once.
      The old version wrapped this in while(!feof(...)); its second
      iteration re-filled every array with the stale value of 'data'. */
   rewind(Dataf);
   fscanf(Dataf, "%d", &N);
   fscanf(Dataf, "%d", &Hidden);
   fscanf(Dataf, "%d", &M);
   for (p=1; p<=TrainSetCount; p++){
       for (q=1; q<=N; q++){
           fscanf(Dataf,"%lf",&data);
           TrainInput[p][q]=data;
       }
       for (q=1; q<=M; q++){
           fscanf(Dataf,"%lf",&data);
           TrainTarget[p][q]=data;
       }
   }
   for (p=1; p<=TestSetCount; p++){
       for (q=1; q<=N; q++){
           fscanf(Dataf,"%lf",&data);
           TestInput[p][q]=data;
       }
       for (q=1; q<=M; q++){
           fscanf(Dataf,"%lf",&data);
           TestTarget[p][q]=data;
       }
   }

   fclose(Dataf);

   /* Result file name: "BasicBPN_" (9) + filename (<=19) fits in 30 bytes. */
   strcat(SaveFileName,DataFileName);
   if ((f = fopen(SaveFileName, "w")) == NULL) {
       printf("Cannot open %s\n", SaveFileName);
       return FALSE;
   }
   return TRUE;
}


/* Linearly rescale every input and target column into [LO, HI], using the
   combined range of the training and test sets.  The per-column ranges are
   kept in Max/Min (slots 1..N for inputs, N+1..N+M for targets) so that
   ReverseNormalizeData can map net outputs back to the original scale. */
void NormalizeData()
{
  INT  p,q;

  Max = (REAL*)  calloc((N+M+1), sizeof(REAL));
  Min = (REAL*)  calloc((N+M+1), sizeof(REAL));

  /* Seed the scans with the opposite extremes. */
  for (q=1; q<=(N+M); q++){
	  Max[q] = MIN_REAL;
	  Min[q] = MAX_REAL;
  }

  /* Observed range of every input column over both data sets. */
  for (q=1; q<=N; q++){
	  for (p=1; p<=TrainSetCount; p++){
		  Max[q] = MAX(Max[q],TrainInput[p][q]);
		  Min[q] = MIN(Min[q],TrainInput[p][q]);
	  }
	  for (p=1; p<=TestSetCount; p++){
		  Max[q] = MAX(Max[q],TestInput[p][q]);
		  Min[q] = MIN(Min[q],TestInput[p][q]);
	  }
  }

  /* Observed range of every target column.  BUG FIX: these loops previously
     ran p < count (not p <= count), so the last training and test set's
     targets never entered the range computation. */
  for (q=N+1; q<=N+M; q++){
	  for (p=1; p<=TrainSetCount; p++){
		  Max[q] = MAX(Max[q],TrainTarget[p][q-N]);
		  Min[q] = MIN(Min[q],TrainTarget[p][q-N]);
	  }
	  for (p=1; p<=TestSetCount; p++){
		  Max[q] = MAX(Max[q],TestTarget[p][q-N]);
		  Min[q] = MIN(Min[q],TestTarget[p][q-N]);
	  }
  }

  /* Rescale inputs into [LO, HI]. */
  for (q=1; q<=N; q++){
	  for (p=1; p<=TrainSetCount; p++){
		  TrainInput[p][q]    = (TrainInput[p][q] - Min[q])/(Max[q] - Min[q])*(HI - LO) + LO;
	  }
	  for (p=1; p<=TestSetCount; p++){
          TestInput[p][q]     = ( TestInput[p][q] - Min[q])/(Max[q] - Min[q])*(HI - LO) + LO;
	  }
  }

  /* Rescale targets into [LO, HI]. */
  for (q=N+1; q<=N+M; q++){
	  for (p=1; p<=TrainSetCount; p++){
		  TrainTarget[p][q-N] = (TrainTarget[p][q-N] - Min[q])/(Max[q] - Min[q])*(HI - LO) + LO;
	  }
	  for (p=1; p<=TestSetCount; p++){
		  TestTarget[p][q-N]  = (TestTarget[p][q-N] - Min[q])/(Max[q] - Min[q])*(HI - LO)+ LO;
	  }
  }

}

/* Map M values in Data from the net's [LO, HI] range back onto the
   original target scale recorded in Max/Min (slots N+1..N+M).
   tag==1 means Data is 1-based (indices 1..M); anything else, 0-based. */
void ReverseNormalizeData(REAL* Data, INT tag)
{
   INT base = (tag==1) ? 1 : 0;
   INT q;

   for (q = base; q < M + base; q++) {
       INT r = q - base + N + 1;   /* matching Max/Min slot for this output */
       Data[q] = (Data[q] - LO)/(HI - LO) * (Max[r] - Min[r]) + Min[r];
   }
}


/* Prepare the application for training: rescale the data and set the
   training hyper-parameters (these replace the values assigned in
   GenerateNetwork, assuming this runs afterwards - confirm call order). */
void InitializeApplication(NET* Net)
{
  /* Bring all inputs/targets into [LO, HI] before training starts. */
  NormalizeData();

  Net->Alpha = 0.5;   /* momentum factor */
  Net->Eta   = 0.05;  /* learning rate   */
  Net->Gain  = 1;     /* sigmoid gain    */
}


/* Close the result file opened in ReadData.  Guarded: calling fclose(NULL)
   is undefined behavior, and f is NULL when that fopen failed.  Nulling f
   afterwards also makes a second FinalizeApplication call harmless. */
void FinalizeApplication(NET* Net)
{
  if (f != NULL) {
      fclose(f);
      f = NULL;
  }
}


/******************************************************************************
                          I N I T I A L I Z A T I O N
 ******************************************************************************/


/* Allocate the whole network: one LAYER per entry in Units[], with every
   per-unit array sized Units[l]+1 so slot 0 can serve as the bias unit
   (its output is pinned to BIAS).  Weight rows exist only for layers after
   the input layer and span the previous layer plus its bias. */
void GenerateNetwork(NET* Net)
{
  INT l,i;

  Net->Layer = (LAYER**) calloc(NUM_LAYERS, sizeof(LAYER*));

  for (l=0; l<NUM_LAYERS; l++) {
    LAYER* Lay = (LAYER*) malloc(sizeof(LAYER));

    Lay->Units      = Units[l];
    Lay->Output     = (REAL*)  calloc(Units[l]+1, sizeof(REAL));
    Lay->Error      = (REAL*)  calloc(Units[l]+1, sizeof(REAL));
    Lay->Weight     = (REAL**) calloc(Units[l]+1, sizeof(REAL*));
    Lay->WeightSave = (REAL**) calloc(Units[l]+1, sizeof(REAL*));
    Lay->dWeight    = (REAL**) calloc(Units[l]+1, sizeof(REAL*));
    Lay->Output[0]  = BIAS;

    /* The input layer (l==0) has no incoming connections. */
    if (l > 0) {
      for (i=1; i<=Units[l]; i++) {
        Lay->Weight[i]     = (REAL*) calloc(Units[l-1]+1, sizeof(REAL));
        Lay->WeightSave[i] = (REAL*) calloc(Units[l-1]+1, sizeof(REAL));
        Lay->dWeight[i]    = (REAL*) calloc(Units[l-1]+1, sizeof(REAL));
      }
    }

    Net->Layer[l] = Lay;
  }

  Net->InputLayer  = Net->Layer[0];
  Net->OutputLayer = Net->Layer[NUM_LAYERS - 1];

  /* Default training parameters; InitializeApplication assigns its own. */
  Net->Alpha       = 0.9;
  Net->Eta         = 0.25;
  Net->Gain        = 1;
}


void RandomWeights(NET* Net)
{
  INT l,i,j;
   
  for (l=1; l<NUM_LAYERS; l++) {
    for (i=1; i<=Net->Layer[l]->Units; i++) {
      for (j=0; j<=Net->Layer[l-1]->Units; j++) {
        Net->Layer[l]->Weight[i][j] = RandomEqualREAL(-0.5, 0.5);
      }

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -