// bp.cpp -- backpropagation neural network (NetNode / NetLayer / NNet)
#include"stdafx.h"
#include"bp.h"
//#include<fstream.h>
int gCount=0;
double (*fun[3])(double)={purelinf,tansigf,logsigf};
double (*gfun[3])(double)={gpurelinf,gtansigf,glogsigf};
//#define WOUT
//******************************************************************************
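// Transfer functions and their derivatives, dispatched through fun[]/gfun[]
// by funtype: 0 = purelin, 1 = tansig, 2 = logsig.
//   purelin: f(x) = x                    f'(x) = 1
//   tansig:  f(x) = (1-e^-x)/(1+e^-x)    f'(x) = 2e^-x/(1+e^-x)^2
//   logsig:  f(x) = 1/(1+e^-x)           f'(x) = e^-x/(1+e^-x)^2 = f(x)(1-f(x))
// Note this tansig is tanh(x/2) rather than MATLAB's tanh(x); the gradient
// below matches the form actually implemented, so the pair is consistent.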
double logsigf(double in)
{
return 1.0/(1.0+exp(-in));
}
double tansigf(double in)
{
return (1.00-exp(-in))/(1.00+exp(-in));
}
double purelinf(double in)
{
return in;
}
double glogsigf(double in)
{
return exp(-in)/pow(1+exp(-in),2);
}
double gtansigf(double in)
{
return 2.0*exp(-in)/pow(1+exp(-in),2);
}
double gpurelinf(double in)
{
return 1;
}
void NetNode::InitNetNode(int input)
{
error=0;
output=0;
preoutput=0;
delta=0;
inputs=input;
gradient=new double[input];
lastgradient=new double[input];
pweight=new double[input];
// step=new double[input];
memset(gradient,0,sizeof(double)*inputs);
// memset(step,0,sizeof(double)*inputs);
memset(lastgradient,0,sizeof(double)*inputs);
memset(pweight,0,sizeof(double)*inputs);
cong=0;
lastcong=0;
mode=traingd;	// default; SetMode() switches this to traincgp when needed
}
NetNode::~NetNode()
{
delete []gradient;
delete []lastgradient;
delete []pweight;
// cong and lastcong are null unless SetMode(traincgp) allocated them, and
// delete[] on a null pointer is a no-op, so no mode check is needed here.
delete []lastcong;
delete []cong;
}
void NetNode::Reset()
{
error=0;
output=0;
preoutput=0;
delta=0;
// dstep=0;
memcpy(lastgradient,gradient,sizeof(double)*inputs);
memset(gradient,0,sizeof(double)*inputs);
// memset(step,0,sizeof(double)*inputs);
if(mode==traincgp)
{
memcpy(lastcong,cong,sizeof(double)*inputs);
memset(cong,0,sizeof(double)*inputs);
}
}
void NetNode::SetMode(int nMode)
{
mode=nMode;
if(mode==traincgp)
{
// Allocate only once so repeated SetMode() calls do not leak.
if(cong==0)cong=new double[inputs];
if(lastcong==0)lastcong=new double[inputs];
memset(cong,0,sizeof(double)*inputs);
memset(lastcong,0,sizeof(double)*inputs);
}
}
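// Forward pass for one node: weighted sum of the previous layer's outputs,
// then the transfer function selected by funtype.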
double NetNode::Output(NetLayer * input,int funtype)
{
double result=0;
for(int i=0;i<inputs;i++)
{
result+=pweight[i]*input->pNode[i].output;
}
preoutput=result;
output=fun[funtype](result);
/*#ifdef OUT
cout<<preoutput<<" "<<output<<endl;
#endif*/
return output;
}
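// Line-search probe: recompute this node's output with the weights displaced
// along the current update direction (steepest descent for traingd, the
// 0.9/0.1 blended conjugate direction for traincgp) without committing the
// change. AdjustWeithts() later applies the same displacement permanently.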
void NetNode::SearchStep(NetLayer * input,int funtype,double learn)
{
if(mode==traingd)
{
double result=0;
for(int i=0;i<inputs;i++)
{
result+=(pweight[i]+learn*gradient[i])*input->pNode[i].output;
}
output=fun[funtype](result);
}
if(mode==traincgp)
{
double result=0;
for(int i=0;i<inputs;i++)
{
result+=(pweight[i]+0.9*learn*cong[i]+0.1*lastcong[i])*input->pNode[i].output;
}
output=fun[funtype](result);
}
/*#ifdef OUT
cout<<preoutput<<" "<<output<<endl;
#endif*/
}
void NetNode::InitWeight()
{
gCount++;
// Small constant weights, every third one negated to break symmetry.
for(int i=0;i<inputs;i++)
{
pweight[i]=0.005;
//int tag=rand()%2;
if(i%3==2)pweight[i]=-pweight[i];
/*#ifdef OUT
cout<<pweight[i]<<" ";
#endif*/
}
}
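/* Sketch of a common alternative (not used by this code): uniform random
   weights scaled by the square root of the fan-in, e.g.
	pweight[i]=((rand()/(double)RAND_MAX)-0.5)/sqrt((double)inputs);
   The constant scheme above keeps runs reproducible, but it also makes
   every node in a layer start with identical weights. */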
// Commit the weight update: a plain gradient step for traingd, or the
// 0.9*learn*cong + 0.1*lastcong blended conjugate step for traincgp.
bool NetNode::AdjustWeithts(double learn,int sample)
{
if(mode==traingd)
{
for(int i=0;i<inputs;i++)
{
pweight[i]=pweight[i]+learn*gradient[i];
#ifdef WOUT
cout<<"step "<<learn<<endl;
cout<<pweight[i]<<" ";
#endif
}
}
if(mode==traincgp)
{
for(int i=0;i<inputs;i++)
{
#ifdef WOUT
cout<<pweight[i]<<" ";
#endif
pweight[i]=pweight[i]+0.9*learn*cong[i]+0.1*lastcong[i];
}
}
return true;
}
/*bool NetNode::CalStep(NetLayer * pre,NetLayer * next,int index,int funtype,int sample)
{
if(pre==0)return false;
if(next==0)
{
dstep=-error*gfun[funtype](preoutput);
for(int i=0;i<inputs;i++)
{
if(mode==traingd)
{
step[i]=dstep*gradient[i]*pre->pNode[i].output;
}
if(mode==traincgp)
{
step[i]=dstep*cong[i]*pre->pNode[i].output;
}
}
}
else
{
if(mode==traingd)
{
int count=next->Units;
dstep=0;
for(int j=0;j<count;j++)
{
dstep+=next->pNode[j].dstep*(next->pNode[j].pweight[index]+
next->pNode[j].step[index]*next->pNode[j].gradient[index]);
}
dstep=dstep*gfun[funtype](preoutput);
for(int i=0;i<inputs;i++)
{
step[i]=dstep*pre->pNode[i].output*gradient[i];
}
}
if(mode==traincgp)
{
int count=next->Units;
dstep=0;
for(int j=0;j<count;j++)
{
dstep+=next->pNode[j].dstep*(next->pNode[j].pweight[index]+
next->pNode[j].step[index]*next->pNode[j].cong[index]);
}
dstep=dstep*gfun[funtype](preoutput);
for(int i=0;i<inputs;i++)
{
step[i]=dstep*pre->pNode[i].output*cong[i];
}
}
}
}*/
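// Standard backpropagation of the error signal:
//   output layer (next==0): delta = error * f'(net)
//   hidden layer:           delta = f'(net) * sum_j delta_j * w_j[index]
// Each weight's gradient is accumulated across the batch and divided by
// `sample`, the number of training patterns.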
bool NetNode::CalGradient(NetLayer * pre,NetLayer * next,int index,int funtype,int sample)
{
if(pre==0)return false;
if(next==0)
{
double g=error*gfun[funtype](preoutput);
delta=g;
for(int i=0;i<inputs;i++)
{
gradient[i]+=delta*pre->pNode[i].output/(double)sample;
}
}
else
{
int count=next->Units;
double g=0;
for(int j=0;j<count;j++)
{
g+=next->pNode[j].delta*next->pNode[j].pweight[index];
}
delta=g*gfun[funtype](preoutput);
for(int i=0;i<inputs;i++)
{
gradient[i]+=delta*pre->pNode[i].output/(double)sample;
}
}
return true;
}
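// Conjugate direction update: d = beta*d_prev + g, where g is the batch
// gradient just accumulated. Judging by the traincgp mode name, beta is
// presumably the Polak-Ribiere coefficient computed by the caller; the
// `sample` parameter is unused here.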
bool NetNode::CalConjuct(double beta,int sample)
{
for(int i=0;i<inputs;i++)
{
cong[i]=beta*lastcong[i]+gradient[i];
// cout<<lastcong[i]<<" "<<cong[i]<<endl;
}
return true;
}
//******************************************************************************
NetLayer::NetLayer(int nodes,int input)
{
memset(this,0,sizeof(NetLayer));	// zero all members (assumes NetLayer stays a plain struct-like class)
Units=nodes;
pNode=new NetNode[Units];
for(int i=0;i<nodes;i++)
{
pNode[i].InitNetNode(input);
}
Inputs=input;
pre=0;
next=0;
nType=-1;
}
NetLayer::~NetLayer()
{
delete []pNode;
}
void NetLayer::SetConnect(NetLayer * p1,NetLayer * p2)
{
pre=p1;
next=p2;
}
void NetLayer::InitWeight()
{
if(pre==0)return;
for(int i=0;i<Units;i++)
{
/*#ifdef OUT
cout<<"node "<<i<<" : ";
#endif*/
pNode[i].InitWeight();
/*#ifdef OUT
cout<<endl;
#endif*/
}
}
void NetLayer::SetTranFun(int funtype)
{
nType=funtype;
}
void NetLayer::SetTrainMode(int mode)
{
for(int i=0;i<Units;i++)
{
pNode[i].SetMode(mode);
}
}
bool NetLayer::SimLayer()
{
if(pre==0)return false;
/*#ifdef OUT
cout<<"Layer Output"<<endl;
#endif*/
for(int i=0;i<Units;i++)
{
pNode[i].Output(pre,nType);
}
/*#ifdef OUT
cout<<endl;
#endif*/
return true;
}
bool NetLayer::Search(double learn)
{
if(pre==0)return false;
/*#ifdef OUT
cout<<"Layer Output"<<endl;
#endif*/
for(int i=0;i<Units;i++)
{
pNode[i].SearchStep(pre,nType,learn);
}
/*#ifdef OUT
cout<<endl;
#endif*/
return true;
}
bool NetLayer::SetInputLayer(double * in,int index)
{
if(pre!=0)return false;
/*#ifdef OUT
cout<<"Input"<<endl;
#endif*/
for(int i=0;i<Units;i++)
{
pNode[i].output=in[index*Units+i];
// cout<<pNode[i].output;
/*#ifdef OUT
cout<<pNode[i].output<<" ";
#endif*/
}
return true;
}
bool NetLayer::AdjustWeithts(double learn,int sample)
{
#ifdef WOUT
cout<<"Layer "<<endl;
#endif
for(int i=0;i<Units;i++)
{
pNode[i].AdjustWeithts(learn,sample);
}
return true;
}
bool NetLayer::CalGradient(int sample)
{
for(int i=0;i<Units;i++)
{
pNode[i].CalGradient(pre,next,i,nType,sample);
}
return true;
}
/*void NetLayer::CalStep(int sample)
{
for(int i=0;i<Units;i++)
{
pNode[i].CalStep(pre,next,i,nType,sample);
}
}*/
bool NetLayer::Reset()
{
for(int i=0;i<Units;i++)
{
pNode[i].Reset();
}
return true;
}
void NetLayer::CalConjuct(double beta,int sample)
{
for(int i=0;i<Units;i++)
{
pNode[i].CalConjuct(beta,sample);
}
}
//******************************************************************************
NNet::NNet()
{
memset(this,0,sizeof(NNet));	// zero every member (assumes NNet has no virtual functions)
}
NNet::NNet(int inputs,int hides,int outputs)
{
memset(this,0,sizeof(NNet));
freedom=inputs*hides+hides*outputs;
srand((unsigned)time(NULL));
InputLayer=new NetLayer(inputs,0);
layer=new NetLayer(hides,inputs);
OutputLayer=new NetLayer(outputs,hides);
InputLayer->SetConnect(0,layer);
layer->SetConnect(InputLayer,OutputLayer);
OutputLayer->SetConnect(layer,0);
}
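/* Usage sketch -- illustrative only; member visibility and the training loop
   are declared in bp.h, which is not shown here:
	NNet net(2,4,1);                // 2 inputs, 4 hidden units, 1 output
	net.layer->SetTranFun(1);       // hidden layer: tansig
	net.OutputLayer->SetTranFun(0); // output layer: purelin
	net.layer->SetTrainMode(traingd);
	net.OutputLayer->SetTrainMode(traingd);
*/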