
📄 nuralnet.cpp

📁 BP neural network code
💻 CPP
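This listing is a small back-propagation time-series predictor: a fully connected 15-20-1 network of sigmoid units reads a sliding window of 15 past values of a 60-point ramp series (normalized into [0.1, 0.9]) and predicts the next value. Weights are updated by gradient descent with a momentum term (Eta and Alpha), errors are reported as NMSE relative to a mean predictor, and EvaluateNet() compares open-loop predictions against closed-loop predictions that feed the network's own output back into the input window. Note that the early-stopping training loop in main() is commented out, so as shipped the program only tests and evaluates the randomly initialized network; a short usage sketch follows the listing.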
#include <iostream>
#include <math.h>
#include <stdlib.h>

/***************** predefinitions ******************/
#define TRUE  1
#define FALSE 0

#define NUM_LAYERS 3
#define NUM_INPUT  15
#define NUM_OUTPUT 1
int Units[NUM_LAYERS] = {NUM_INPUT, 20, NUM_OUTPUT};

#define NUM_SAMPLES 60
#define TRAIN_LWB   (NUM_INPUT)
#define TRAIN_UPB   (45)
#define TRAIN_TIMES (TRAIN_UPB - TRAIN_LWB + 1)
#define TEST_LWB    (46)
#define TEST_UPB    (53)
#define TEST_TIMES  (TEST_UPB - TEST_LWB + 1)
#define EVAL_LWB    (54)
#define EVAL_UPB    (NUM_SAMPLES - 1)
#define EVAL_TIMES  (EVAL_UPB - EVAL_LWB + 1)

#define HI   0.9
#define LO   0.1
#define BIAS 1

double Samples_[NUM_SAMPLES];               /* closed-loop copy of the series */
double Samples[NUM_SAMPLES] = {
   1.0,  1.2,  1.4,  1.6,  1.8,  2.0,  2.2,  2.4,  2.6,  2.8,
   3.0,  3.2,  3.4,  3.6,  3.8,  4.0,  4.2,  4.4,  4.6,  4.8,
   5.0,  5.2,  5.4,  5.6,  5.8,  6.0,  6.2,  6.4,  6.6,  6.8,
   7.0,  7.2,  7.4,  7.6,  7.8,  8.0,  8.2,  8.4,  8.6,  8.8,
   9.0,  9.2,  9.4,  9.6,  9.8, 10.0, 10.2, 10.4, 10.6, 10.8,
  11.0, 11.2, 11.4, 11.6, 11.8, 12.0, 12.2, 12.4, 12.6, 12.8
};

double Mean;
double TrainError;
double TrainErrorMean;
double TestError;
double TestErrorMean;

/***************** net definition ******************/

/****** layer definition ******/
struct LayerStruct {
  int      Units;
  double*  Output;
  double*  Error;
  double** Weight;
  double** WeightSave;
  double** DeltaWeight;
};

/****** net definition ******/
struct NetStruct {
  LayerStruct** Layer;        /* all layers of the net */
  LayerStruct*  InputLayer;
  LayerStruct*  OutputLayer;
  double        Alpha;        /* momentum factor */
  double        Eta;          /* learning rate */
  double        Gain;         /* sigmoid gain */
  double        Error;        /* error of the last pattern */
};

/***************** random numbers ******************/
void InitializeRandoms(){
  srand(4711);
}

int IntRandom(int Low, int High){
  return rand() % (High - Low + 1) + Low;
}

double DoubleRandom(double Low, double High){
  return (static_cast<double>(rand()) / RAND_MAX) * (High - Low) + Low;
}

/***************** application initialization ******************/
double ReturnMin(double x1, double x2){
  if(x1 > x2) return x2;
  else        return x1;
}

double ReturnMax(double x1, double x2){
  if(x1 > x2) return x1;
  else        return x2;
}

/* scale the series into [LO, HI] and remember its mean */
void Normalize(){
  int time;
  double Min, Max;
  Min =  10000;
  Max = -10000;
  for(time = 0; time < NUM_SAMPLES; time++){
    Min = ReturnMin(Min, Samples[time]);
    Max = ReturnMax(Max, Samples[time]);
  }
  Mean = 0;
  for(time = 0; time < NUM_SAMPLES; time++){
    Samples_[time] = Samples[time] = (Samples[time] - Min) / (Max - Min) * (HI - LO) + LO;
    Mean += Samples[time] / NUM_SAMPLES;
  }
}

/* compute the error of a mean predictor, used later to normalize the MSE */
void InitializeNet(NetStruct* Net){
  int time, i;
  double Out, Err;
  Net->Alpha = 0.5;
  Net->Eta   = 0.05;
  Net->Gain  = 1.0;
  Normalize();
  TrainErrorMean = 0;
  for(time = TRAIN_LWB; time <= TRAIN_UPB; time++){
    for(i = 0; i < NUM_OUTPUT; i++){
      Out = Samples[time + i];
      Err = Mean - Out;
      TrainErrorMean += Err * Err / 2;
    }
  }
  TestErrorMean = 0;
  for(time = TEST_LWB; time <= TEST_UPB; time++){
    for(i = 0; i < NUM_OUTPUT; i++){
      Out = Samples[time + i];
      Err = Mean - Out;
      TestErrorMean += Err * Err / 2;
    }
  }
}

/***************** net initialization ******************/
void GenerateNet(NetStruct* Net){
  int l, i, j;
  Net->Layer = new LayerStruct*[NUM_LAYERS];
  for(l = 0; l < NUM_LAYERS; l++){
    Net->Layer[l] = new LayerStruct;
    Net->Layer[l]->Units       = Units[l];
    Net->Layer[l]->Output      = new double[Units[l] + 1];
    Net->Layer[l]->Error       = new double[Units[l] + 1];
    Net->Layer[l]->Weight      = new double*[Units[l] + 1];
    Net->Layer[l]->WeightSave  = new double*[Units[l] + 1];
    Net->Layer[l]->DeltaWeight = new double*[Units[l] + 1];
    Net->Layer[l]->Output[0]   = BIAS;    /* unit 0 of every layer is the bias */
    if(l != 0){
      for(i = 1; i <= Units[l]; i++){
        Net->Layer[l]->Weight[i]      = new double[Units[l - 1] + 1];
        Net->Layer[l]->WeightSave[i]  = new double[Units[l - 1] + 1];
        Net->Layer[l]->DeltaWeight[i] = new double[Units[l - 1] + 1];
        for(j = 0; j <= Units[l - 1]; j++){
          Net->Layer[l]->DeltaWeight[i][j] = 0;    /* momentum term starts at zero */
        }
      }
    }
  }
  Net->InputLayer  = Net->Layer[0];
  Net->OutputLayer = Net->Layer[NUM_LAYERS - 1];
  Net->Alpha       = 0.9;
  Net->Eta         = 0.25;
  Net->Gain        = 1;
}

void RandomWeights(NetStruct* Net){
  int l, i, j;
  for(l = 1; l < NUM_LAYERS; l++){
    for(i = 1; i <= Net->Layer[l]->Units; i++){
      for(j = 0; j <= Net->Layer[l - 1]->Units; j++){
        Net->Layer[l]->Weight[i][j] = DoubleRandom(-0.5, 0.5);
      }
    }
  }
}

void SetInput(NetStruct* Net, double* Input){
  int i;
  for(i = 1; i <= Net->InputLayer->Units; i++){
    Net->InputLayer->Output[i] = Input[i - 1];
  }
}

void GetOutput(NetStruct* Net, double* Output){
  int i;
  for(i = 1; i <= Net->OutputLayer->Units; i++){
    Output[i - 1] = Net->OutputLayer->Output[i];
  }
}

/***************** stop training ******************/
void SaveWeights(NetStruct* Net){
  int i, j, l;
  for(l = 1; l < NUM_LAYERS; l++){
    for(i = 1; i <= Net->Layer[l]->Units; i++){
      for(j = 0; j <= Net->Layer[l - 1]->Units; j++){
        Net->Layer[l]->WeightSave[i][j] = Net->Layer[l]->Weight[i][j];
      }
    }
  }
}

void RestoreWeights(NetStruct* Net){
  int i, j, l;
  for(l = 1; l < NUM_LAYERS; l++){
    for(i = 1; i <= Net->Layer[l]->Units; i++){
      for(j = 0; j <= Net->Layer[l - 1]->Units; j++){
        Net->Layer[l]->Weight[i][j] = Net->Layer[l]->WeightSave[i][j];
      }
    }
  }
}

/***************** propagate signals ******************/
void PropagateLayer(NetStruct* Net, LayerStruct* Lower, LayerStruct* Upper){
  int i, j;
  double Sum;
  for(i = 1; i <= Upper->Units; i++){
    Sum = 0;
    for(j = 0; j <= Lower->Units; j++){
      Sum += Upper->Weight[i][j] * Lower->Output[j];
    }
    Upper->Output[i] = 1 / (1 + exp(-Net->Gain * Sum));    /* sigmoid activation */
  }
}

void PropagateNet(NetStruct* Net){
  int i;
  for(i = 0; i < NUM_LAYERS - 1; i++){
    PropagateLayer(Net, Net->Layer[i], Net->Layer[i + 1]);
  }
}

/***************** backpropagate errors ******************/
void ComputeOutputError(NetStruct* Net, double* Target){
  int i;
  double Out, Err;
  Net->Error = 0;
  for(i = 1; i <= Net->OutputLayer->Units; i++){
    Out = Net->OutputLayer->Output[i];
    Err = Target[i - 1] - Out;
    Net->OutputLayer->Error[i] = Net->Gain * Out * (1 - Out) * Err;
    Net->Error += Err * Err / 2;
  }
}

void BackpropagateLayer(NetStruct* Net, LayerStruct* Upper, LayerStruct* Lower){
  int i, j;
  double Out, Err;
  for(i = 1; i <= Lower->Units; i++){
    Out = Lower->Output[i];
    Err = 0;
    for(j = 1; j <= Upper->Units; j++){
      Err += Upper->Weight[j][i] * Upper->Error[j];
    }
    Lower->Error[i] = Net->Gain * Out * (1 - Out) * Err;
  }
}

void BackpropagateNet(NetStruct* Net){
  int i;
  for(i = NUM_LAYERS - 1; i > 1; i--){
    BackpropagateLayer(Net, Net->Layer[i], Net->Layer[i - 1]);
  }
}

void AdjustWeights(NetStruct* Net){
  int l, i, j;
  double Out, Err, DeltaWeight;
  for(l = 1; l < NUM_LAYERS; l++){
    for(i = 1; i <= Net->Layer[l]->Units; i++){
      for(j = 0; j <= Net->Layer[l - 1]->Units; j++){
        Out = Net->Layer[l - 1]->Output[j];
        Err = Net->Layer[l]->Error[i];
        DeltaWeight = Net->Layer[l]->DeltaWeight[i][j];
        Net->Layer[l]->Weight[i][j] += Net->Eta * Err * Out + Net->Alpha * DeltaWeight;
        Net->Layer[l]->DeltaWeight[i][j] = Net->Eta * Err * Out;
      }
    }
  }
}

/***************** net simulation ******************/
void SimulateNet(NetStruct* Net, double* Input, double* Output, double* Target, bool Training){
  SetInput(Net, Input);
  PropagateNet(Net);
  GetOutput(Net, Output);
  ComputeOutputError(Net, Target);
  if(Training){
    BackpropagateNet(Net);
    AdjustWeights(Net);
  }
}

void TrainNet(NetStruct* Net, int Epochs){
  int time, n;
  double Output[NUM_OUTPUT];
  for(n = 0; n < Epochs * TRAIN_TIMES; n++){
    time = IntRandom(TRAIN_LWB, TRAIN_UPB);
    SimulateNet(Net, &(Samples[time - NUM_INPUT]), Output, &(Samples[time]), TRUE);
  }
}

void TestNet(NetStruct* Net){
  int time;
  double Output[NUM_OUTPUT];
  TrainError = 0;
  for(time = TRAIN_LWB; time <= TRAIN_UPB; time++){
    SimulateNet(Net, &(Samples[time - NUM_INPUT]), Output, &(Samples[time]), FALSE);
    TrainError += Net->Error;
  }
  TestError = 0;
  for(time = TEST_LWB; time <= TEST_UPB; time++){
    SimulateNet(Net, &(Samples[time - NUM_INPUT]), Output, &(Samples[time]), FALSE);
    TestError += Net->Error;
  }
  std::cout << "\nin training NMSE = " << TrainError / TrainErrorMean
            << "\nin test     NMSE = " << TestError  / TestErrorMean;
}

void EvaluateNet(NetStruct* Net){
  int time;
  double Output[NUM_OUTPUT];     /* open-loop prediction from true past values */
  double Output_[NUM_OUTPUT];    /* closed-loop prediction fed back into Samples_ */
  std::cout << "\n\nTime\t\tSpeed\t\tOpenloop\t\tClose\n\n";
  for(time = EVAL_LWB; time <= EVAL_UPB; time++){
    SimulateNet(Net, &(Samples[time - NUM_INPUT]),  Output,  &(Samples[time]), FALSE);
    SimulateNet(Net, &(Samples_[time - NUM_INPUT]), Output_, &(Samples[time]), FALSE);
    Samples_[time] = Output_[0];
    std::cout << time << "\t\t" << Samples[time] << "\t" << Output[0] << "\t" << Output_[0] << "\n";
  }
}

/***************** main() ******************/
int main(){
  NetStruct Net;
  bool Stop;
  double MinTestError;

  InitializeRandoms();
  GenerateNet(&Net);
  RandomWeights(&Net);
  InitializeNet(&Net);

  Stop = FALSE;
  MinTestError = 10000;
  /* early-stopping training loop, disabled in this version:
  do{
    TrainNet(&Net, 10);
    TestNet(&Net);
    if(TestError < MinTestError){
      std::cout << "....saving weights \n";
      MinTestError = TestError;
      SaveWeights(&Net);
      std::cout << "saved\n";
    }
    else if(TestError > 1.2 * MinTestError){
      std::cout << "....restoring weights \n";
      Stop = TRUE;
      RestoreWeights(&Net);
    }
  } while(!Stop); */
  TestNet(&Net);
  EvaluateNet(&Net);
  return 0;
}
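
The file should build with any standard C++ compiler (for example, g++ nuralnet.cpp -o nuralnet). As a minimal usage sketch, not part of nuralnet.cpp, the lines below show how the sliding-window indexing used in TrainNet, TestNet, and EvaluateNet maps a single time step onto SimulateNet's arguments; the local names t, input, and target are illustrative only and the globals (Net, Samples, NUM_INPUT, NUM_OUTPUT) come from the listing above.

  /* illustrative only: one open-loop prediction step at time index t,
     with TRAIN_LWB <= t <= EVAL_UPB */
  int t = TRAIN_LWB;
  double Output[NUM_OUTPUT];
  double* input  = &Samples[t - NUM_INPUT];   /* the 15 past values Samples[t-15] .. Samples[t-1] */
  double* target = &Samples[t];               /* the value the net should predict */
  SimulateNet(&Net, input, Output, target, FALSE);   /* FALSE: forward pass only; TRUE would also backpropagate and update weights */

Passing TRUE for the last argument is what TrainNet does on randomly chosen training indices, while TestNet and EvaluateNet always pass FALSE so the weights are left unchanged while errors are accumulated.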
