// bpnn.cpp — BP neural network with optional fractal connectivity (C++Builder/VCL)
//---------------------------------------------------------------------------
#include <vcl.h>
#pragma hdrstop
#include <algorithm.h>
#include <vector.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <alloc.h>
#include "Bpnn.h"
#include <mmsystem.h>
//---------------------------------------------------------------------------
#pragma package(smart_init)
#pragma link "CSPIN"
#pragma resource "*.dfm"
////
//// Global network configuration, weights, and training state shared by every
//// handler below. NOTE: the loop counters (i, j, k, ...) are file-scope too.
#define MAXLAYERS 5 // maximum number of layers
#define MAXNNUM 60 // maximum neurons per layer
#define MAXTrainDataNum 208 //135 // 9*15 — stale note: "iris.txt has 150 records"; sonar.data has 208
#define MAXTestDataNum 208 ///15 // 150/15 (same stale iris note)
#define MAXLEARNNUM 1000 // maximum number of training epochs
#define MAXONUM 10
double W[MAXLAYERS][MAXLAYERS][MAXNNUM][MAXNNUM]; // weight matrix: W[from-layer][to-layer][from-neuron][to-neuron]
double dW[MAXLAYERS][MAXLAYERS][MAXNNUM][MAXNNUM]; // weight-delta matrix
double d[MAXLAYERS][MAXNNUM]; // intermediate term used when computing dW (back-propagated delta)
double TH[MAXLAYERS][MAXNNUM]; // threshold (bias) matrix
double dTH[MAXLAYERS][MAXNNUM]; // threshold deltas
double learnRatio[MAXLAYERS];// per-layer learning rate; note the per-layer indexing
double Sum[MAXLAYERS][MAXNNUM];// per-layer weighted input sum -> fed to the activation
double Out[MAXLAYERS][MAXNNUM];// per-layer outputs
//double FinalOut[MAXNNUM];// for recognition/classification
int NNum[MAXLAYERS];// neuron count of each layer
double trainData[MAXTrainDataNum][MAXNNUM+MAXONUM];// training data: inputs + expected outputs
double testData[MAXTestDataNum][MAXNNUM+MAXONUM]; // test data: inputs + expected outputs
/////
double err[MAXLEARNNUM]; // global error, one slot per epoch
double ERR;// global error tolerance (stopping criterion)
double singleErr;
int learnN; // epochs completed so far
double sum; // scratch accumulator (global!)
int i,j,k,t,n,pren,postn,randT; // loop variables — file-scope, shared/clobbered across functions
int I,H,O,L;// layer indices: input, hidden, output; L is a loop counter over layers
int LAYERS=3;// number of layers
int trainDataNum=104;//104;// stale note: "iris.txt has 150 records"; half of the 208 sonar records
int testDataNum=208; // stale note as above; here the full sonar set is reused for testing
int randList[MAXTrainDataNum];// random permutation used to shuffle the training set
DWORD time1,time2,time2_1;// for timing the training run
double C[MAXLAYERS][MAXNNUM][MAXNNUM];// connectivity flags (fractal network): 1 = connected, 0 = pruned
///
FILE *outputFile; // "out.data", opened in readBtnClick, written by fnninit/trainBtnClick
/////
TForm1 *Form1; // VCL auto-created form instance
//---------------------------------------------------------------------------
// VCL form constructor — no custom setup here; the network is configured in
// readBtnClick() (data loading) and init() (topology/weights).
__fastcall TForm1::TForm1(TComponent* Owner)
: TForm(Owner)
{
}
//---------------------------------------------------------------------------
void __fastcall TForm1::readBtnClick(TObject *Sender)
{
// Loads the sonar data set ("sonar.data"): each record is 60 comma-separated
// doubles followed by a class letter ('M' = mine, 'R' = rock) and a newline.
// Fills trainData[][0..59] with the attributes and [60],[61] with the one-hot
// target, mirrors the records into testData, and shows a preview in ListBox1.
// Also opens the global outputFile ("out.data") used by fnninit/trainBtnClick.
char nouse;
FILE *inputFile;
inputFile=fopen("sonar.data","rb");
outputFile=fopen("out.data","wb");
if(inputFile==NULL)
{
ShowMessage("cannot open sonar.data"); // fail loudly instead of crashing in fscanf
return;
}
ListBox1->Clear();
// Read until fscanf stops producing values. The previous `while(!feof(...))`
// loop ran one extra iteration after the last record (feof only turns true
// AFTER a failed read), leaving a garbage row that downstream code had to
// skip with `j-1`. Checking the fscanf return value makes j the exact count.
j=0;
for(;;)
{
if(fscanf(inputFile, "%lf", &trainData[j][0])!=1) break; // first attribute, or clean EOF
fread(&nouse, 1, 1, inputFile); // consume the comma after the value
for ( i=1;i<60;i++)
{
fscanf(inputFile, "%lf", &trainData[j][i]);
fread(&nouse, 1, 1, inputFile); // consume the separator after each value
}
fread(&nouse, 1, 1, inputFile); // class letter
if(nouse=='M') // mine -> target (1,0)
{trainData[j][60]=1;
trainData[j][61]=0;}
else // rock -> target (0,1)
{trainData[j][60]=0;
trainData[j][61]=1;}
fread(&nouse, 1, 1, inputFile); // trailing newline
j++;
}
fclose(inputFile);
for( i=0;i<j;i++) // j is now the true record count (no phantom last row)
ListBox1->Items->Append( IntToStr(i+1)
+" "+FloatToStr(trainData[i][0])
+" "+FloatToStr(trainData[i][1])
+" "+FloatToStr(trainData[i][2])
+" "+FloatToStr(trainData[i][3])
+" "+FloatToStr(trainData[i][4])
+" "+FloatToStr(trainData[i][5])
+" "+FloatToStr(trainData[i][6])
+" "+FloatToStr(trainData[i][60])
+" "+FloatToStr(trainData[i][61]));
//
// Reuse the same records as the test set (commented alternative: a disjoint
// split starting at trainDataNum). NOTE: this loop clobbers global j.
for ( i=0;i<testDataNum;i++)
{
for ( j=0;j<62;j++)
testData[i][j]=trainData[i][j];//trainData[trainDataNum+i][j];// trainData[i][j];////
}
//
Label3->Caption="0";
//
////////////////
}
//---------------------------------------------------------------------------
void __fastcall TForm1::init()
{
// Configures the network topology (60-24-2 for the sonar data), resets the
// epoch counter, seeds the RNG, gives every weight and threshold a small
// random initial value, sets the per-layer learning rates, and finally builds
// the fractal connectivity matrix via fnninit().
// NOTE: the loop variables L, n, pren are file-scope globals and are left
// mutated when this function returns.
I=0;
H=1;
O=2;
LAYERS=3;// number of layers
//LAYERS=4;// number of layers
NNum[I]=60; //input layer neuron count
//NNum[1]=40; //hidden layer
//NNum[2]=20; //hidden layer
NNum[H]=24; //hidden layer
NNum[O]=2; //output layer
//NNum[3]=2; //output layer
learnN=0;
sum=0;
ERR=0.0002; //0.0005; // global error tolerance (stopping criterion)
randomize(); // Borland RTL: seed rand() from the system clock
//begin initializing weights and thresholds
for(L=1;L<LAYERS;L++ )
{
for(n=0;n<NNum[L];n++) //neuron n of the current layer
{
for( pren=0;pren<NNum[L-1];pren++ ) //neuron pren of the previous layer
{
W[L-1][L][pren][n]=myRand()*3;//10; // presumably myRand() is uniform in [-0.1,0.1], scaled by 3 — TODO confirm myRand's range
}
TH[L][n]=myRand()*3;//10; // thresholds initialized to the same range as weights
}
}
//end initializing weights and thresholds
//begin learnRatio
for(L=0;L<LAYERS;L++ )
learnRatio[L]=2.0;////0.006;//0.5? per-layer learning rate; note the per-layer indexing
//end learnRatio
fnninit();
}
//---------------------------------------------------------------------------
void __fastcall TForm1::fnninit()
{
// Builds the "fractal" connectivity matrix C: assigns a 1-D coordinate to
// every neuron, computes the distance between neurons of adjacent layers,
// derives a connection probability CP = A * dist^(fDim - nDim), and keeps each
// connection with that probability (C=1 kept, C=0 pruned). Reports the
// resulting connectivity percentage on Label5 and in outputFile, and shows the
// fractal dimension on Label9.
double nDim=1.0;//neural network's (embedding) dimension
double fDim=0.6;//fractal neural network's dimension
int A=1;
double rou;//random threshold in [0,1]
int count,TotalCount; // pruned connections / total candidate connections
int Q[MAXLAYERS][MAXNNUM];//1-D coordinate of each neuron
double dist[MAXLAYERS][MAXNNUM][MAXNNUM];//distance between adjacent-layer neurons
double CP[MAXLAYERS][MAXNNUM][MAXNNUM];//connect probability
count=0;
TotalCount=0;
//layer 0: coordinates are simply the neuron indices
for(i=0;i<NNum[0];i++)
Q[0][i]=i;
//layers 1..LAYERS-2: spread evenly over the previous layer's coordinate range
for ( k=1;k<=LAYERS-2;k++ )
for(i=0;i<NNum[k];i++)
Q[k][i]=(int)(NNum[k-1]*(2*i-1)/(2*NNum[k]))+1;///intended: smallest integer above the ratio (int division truncates — TODO confirm intent)
///
for ( k=0;k<LAYERS-2;k++ )
for(i=0;i<NNum[k];i++)
for(j=0;j<NNum[k+1];j++)
{
dist[k][i][j]=sqrt( pow( Q[k][i]-Q[k+1][j] , 2) +1 ); //(k+1 - k)^2 = 1: layers are one unit apart
CP[k][i][j]=A*pow(dist[k][i][j],fDim-nDim);
// Use RAND_MAX instead of the hard-coded 0x7FFF, which is only correct on
// compilers (like classic Borland) where RAND_MAX happens to be 0x7FFF.
rou=(rand()+0.0)/RAND_MAX;
if(CP[k][i][j]>=rou)
C[k][i][j]=1;
else {C[k][i][j]=0;count++;}
}
for ( k=0;k<LAYERS-1;k++ )
TotalCount+=NNum[k]*NNum[k+1];
//
String str;
if((TotalCount-count)==TotalCount) str="100";
else str=(FloatToStr((TotalCount-count)/(TotalCount+0.0))).SubString(3,2);
// Fixed two undefined behaviors in the original fprintf:
//  - "%s% " contained a stray '% ' conversion; "%%" prints a literal '%'.
//  - a VCL String object was passed through varargs for %s; c_str() is required.
fprintf(outputFile, "%s%% ", str.c_str());
Label5->Caption=str+"%";
//
str=FloatToStr(fDim);//(FloatToStr(fDim)).SubString(3,2);
Label9->Caption=str;
}
//---------------------------------------------------------------------------
void __fastcall TForm1::trainBtnClick(TObject *Sender)
{
// Trains the network: each epoch shuffles the training set, runs forward and
// backward passes over every sample, then computes the global error. Stops
// when the error drops below ERR or MAXLEARNNUM epochs have run, then reports
// the epoch count (Label13, outputFile) and the elapsed time.
init();
TimeStart();
do
{
randListF();//shuffle the training set; result is randList[trainDataNum]
for( t=0;t<trainDataNum;t++)
{
randT=randList[t];///randomly chosen training sample
forward(randT); //use randT; note global t is already in use
backward(randT); //use randT
//improved algorithms would hook in here
}
error();
learnN=learnN+1;
}
while( (learnN<MAXLEARNNUM) && (err[learnN-1]>ERR) ); // logical && (was bitwise &, which never short-circuits)
String str;
str=IntToStr(learnN);
// %s requires a C string — passing the VCL String object itself through
// varargs is undefined behavior; c_str() fixes it.
fprintf(outputFile, "%s ", str.c_str());
Label13->Caption=str+"次";
TimeEnd(Sender);
}
//---------------------------------------------------------------------------
void __fastcall TForm1::forward(int trainID)
{ // note: global t is in use by the caller's loop, hence the trainID parameter
// Forward pass for training sample trainID: copies its attributes into the
// input layer's outputs, then propagates layer by layer, zeroing weights of
// connections pruned by the fractal mask C when RadioF is checked.
// NOTE(review): this function is truncated in this view — the accumulation /
// activation code that follows the pruning check is not visible here.
//output of the input layer
for(n=0;n<NNum[0];n++)
Out[0][n]=trainData[trainID][n];
//begin outputs of the remaining layers
for(L=1;L<LAYERS;L++) //which layer
{
for(n=0;n<NNum[L];n++) //which neuron of that layer
{
sum=0.0;
for(pren=0;pren<NNum[L-1];pren++) //which neuron of the previous layer
{
//begin fractal-network handling: sever pruned connections
if(RadioF->Checked==true)
{ if( (C[L-1][pren][n]==0)&&( (L-1)<=LAYERS-3 ))
W[L-1][L][pren][n]=0;
}
//end fractal-network handling
// NOTE(review): the file is truncated here — the remainder of forward()
// (weighted-sum accumulation, activation, closing braces) and any later
// functions (backward, error, randListF, TimeStart/TimeEnd, myRand) are
// missing. Non-source viewer chrome that occupied these lines was removed.