// bp.cpp -- back-propagation neural network (NNet: 3-layer net with
// gradient-descent / conjugate-gradient training) and a four-class
// one-vs-rest classifier (DNANNet) built on top of it.
Error=new double[outputs];
memset(Error,0,sizeof(double)*outputs);
nInput=inputs;
nOutput=outputs;
fError=0;
nMode=0;
samples=0;
}
NNet::~NNet()
{
    // Release everything the network owns: the per-output buffers first,
    // then the three layers.
    // NOTE(review): assumes `goal` is either allocated by Train() or
    // initialised to NULL by the constructor -- confirm; delete[] on an
    // uninitialised pointer is undefined behaviour.
    delete [] Error;
    delete [] goal;
    delete OutputLayer;
    delete layer;
    delete InputLayer;
}
void NNet::InitNet(int inputs,int hides,int outputs)
{
freedom=inputs*hides+hides*outputs;
srand(time(NULL));
InputLayer=new NetLayer(inputs,0);
layer=new NetLayer(hides,inputs);
OutputLayer=new NetLayer(outputs,hides);
InputLayer->SetConnect(0,layer);
layer->SetConnect(InputLayer,OutputLayer);
OutputLayer->SetConnect(layer,0);
Error=new double[outputs];
memset(Error,0,sizeof(double)*outputs);
nInput=inputs;
nOutput=outputs;
fError=0;
nMode=0;
samples=0;
}
// Forward pass for training sample `index` (whose inputs must already be
// loaded via InputLayer->SetInputLayer): propagate through the hidden and
// output layers, then record this sample's error (CalError also
// accumulates into fError).  Order of the three calls is significant.
void NNet::Sim(int index)
{
layer->SimLayer();
OutputLayer->SimLayer();
CalError(index);
}
// Inference entry point: load `input` into the input layer, run the
// forward pass, and copy the output-layer activations into `output`
// (caller provides a buffer of at least OutputLayer->Units doubles).
void NNet::Sim(double * input,double * output)
{
    InputLayer->SetInputLayer(input,0);
    layer->SimLayer();
    OutputLayer->SimLayer();
    const int count = OutputLayer->Units;
    for(int k = 0; k < count; ++k)
        output[k] = OutputLayer->pNode[k].output;
}
// Trial evaluation for the line search: load sample `index` from `in`,
// let both trainable layers take a probe step of size `learn` along their
// current search direction, and return the half-squared error of this
// sample at that trial point.
double NNet::Search(double * in,double learn,int index)
{
    InputLayer->SetInputLayer(in,index);
    layer->Search(learn);
    OutputLayer->Search(learn);
    double sse = 0;
    for(int k = 0; k < nOutput; ++k)
    {
        const double diff = goal[index*nOutput + k] - OutputLayer->pNode[k].output;
        sse += diff*diff*0.5;
    }
    return sse;
}
void NNet::CalError(int index)
{
// cout<<" "<<goal[index*nOutput+index]<<endl;
for(int i=0;i<nOutput;i++)
{
Error[i]=goal[index*nOutput+i]-OutputLayer->pNode[i].output;
OutputLayer->pNode[i].error=Error[i];
fError+=Error[i]*Error[i]*0.5;
}
}
// Apply the accumulated weight updates to both trainable layers with step
// size `learn`, normalised by the sample count.
// NOTE: "AdjustWeithts" is the (misspelled) name declared in NetLayer --
// it must stay spelled this way to match that declaration.
void NNet::AdjustWeights(double learn,int nSample)
{
layer->AdjustWeithts(learn ,nSample);
OutputLayer->AdjustWeithts(learn,nSample);
}
// Set the transfer (activation) functions: funtype[0] for the hidden
// layer, funtype[1] for the output layer.  Caller must pass at least two
// elements (see DNANNet::Training, which passes {logsig, purelin}).
void NNet::SetTranFun(int * funtype)
{
layer->SetTranFun(funtype[0]);
OutputLayer->SetTranFun(funtype[1]);
}
// Select the training algorithm (e.g. traincgp for conjugate-gradient;
// see Train()) and forward the choice to both trainable layers.
void NNet::SetTrainMode(int mode)
{
nMode=mode;
layer->SetTrainMode(mode);
OutputLayer->SetTrainMode(mode);
}
// Convergence guard: returns false when the squared norm of the full
// gradient (hidden + output layer weights) is effectively zero, i.e.
// there is no direction left to descend along.
bool NNet::CheckStep()
{
    double norm2 = 0;
    for(int u = 0; u < layer->Units; ++u)
    {
        for(int w = 0; w < layer->Inputs; ++w)
        {
            const double g = layer->pNode[u].gradient[w];
            norm2 += g*g;
        }
    }
    for(int u = 0; u < OutputLayer->Units; ++u)
    {
        for(int w = 0; w < OutputLayer->Inputs; ++w)
        {
            const double g = OutputLayer->pNode[u].gradient[w];
            norm2 += g*g;
        }
    }
    //cout<<"check "<<norm2<<endl;
    return norm2 > 1e-10;
}
// Batch training loop (gradient descent / conjugate gradient).
//   in       : nSample rows of nInput features, row-major.
//   pgoal    : nSample rows of nOutput targets (copied into this->goal).
//   nEpoch   : maximum number of epochs.
//   minmse   : stop once the accumulated epoch error fError falls below it.
//   minl,maxl: bracket handed to the golden-section line search (CalStep).
// NOTE(review): `goal` is new[]'d here on every call -- calling Train
// twice on the same NNet leaks the previous buffer; confirm callers train
// each instance only once (DNANNet::Training does).
void NNet::Train(double * in,double * pgoal,int nSample,int nEpoch,
double minmse,double minl,double maxl)
{
// `used` belonged to the (commented-out) random sample shuffling below;
// with that code disabled the array is effectively dead.
bool * used=new bool[nSample];
goal=new double[nOutput*nSample];
memcpy(goal,pgoal,sizeof(double)*nOutput*nSample);
samples=nSample;
layer->InitWeight();
OutputLayer->InitWeight();
int nCon=0; // iterations since the last conjugate-gradient restart
for(int i=0;i<nEpoch;i++)
{
layer->Reset();
OutputLayer->Reset();
if(i>0)memset(used,0,sizeof(bool)*nSample);
fError=0;
memset(Error,0,sizeof(double)*nOutput);
// Accumulate the batch gradient over all samples.
for(int j=0;j<nSample;j++)
{
int index=j;
/* if(i>0)
{
while(1)
{
int r=rand()%nSample;
if(!used[r])
{
index=r;
used[r]=true;
break;
}
}
}
*/
InputLayer->SetInputLayer(in,index);
Sim(index);
OutputLayer->CalGradient(nSample);
layer->CalGradient(nSample);
}
// if(i%100==0)deb<<"Train "<<i<<" "<<fError<<endl;
// In conjugate-gradient mode, update the search direction.
if(nMode==traincgp)
{
CalConject(nCon,nSample);
}
// Reached the error target.
if(fError<=minmse)
{
// cout<<"Train "<<i<<" "<<fError<<endl;
break;
}
// Gradient vanished -- nothing left to optimise.
if(!CheckStep())break;
// Not a descent direction: restart conjugate gradients this epoch.
if(nMode==traincgp&&!CheckConj())
{
nCon=0;
continue;
}
else
{
nCon++;
}
// Periodic restart after `freedom` (= number of weights) iterations.
if(nMode==traincgp&&nCon%freedom==0)
{
nCon=0;
continue;
}
double re=0;
// Golden-section line search for the step size; `re` receives the total
// trial error at the chosen step.
double step=CalStep(in,minl,maxl,re);
// NOTE(review): `re` is normalised by 2*nSample here but compared against
// the un-normalised fError -- looks inconsistent; confirm intended.
if(nMode==traincgp&&nCon>1&&re/(double)(2*nSample)>=fError)
{
nCon=0;
continue;
}
AdjustWeights(step,nSample);
//int oo;cin>>oo;
}
delete []used;
}
// Descent-direction test for conjugate-gradient mode: the stored search
// direction (cong) must have a positive projection onto the current
// gradient across all weights; otherwise it is not a descent direction
// and Train() restarts the conjugate sequence.
bool NNet::CheckConj()
{
    double dot = 0;
    for(int u = 0; u < layer->Units; ++u)
    {
        for(int w = 0; w < layer->Inputs; ++w)
            dot += layer->pNode[u].cong[w] * layer->pNode[u].gradient[w];
    }
    for(int u = 0; u < OutputLayer->Units; ++u)
    {
        for(int w = 0; w < OutputLayer->Inputs; ++w)
            dot += OutputLayer->pNode[u].cong[w] * OutputLayer->pNode[u].gradient[w];
    }
    //cout<<"check "<<dot<<endl;
    return dot > 0;
}
// Golden-section line search over [min, max] for the step size that
// minimises the summed trial error over all samples (via Search()).
// 0.382/0.618 are the golden-ratio split points; the bracket shrinks
// until its active end is within 0.01, and `result` receives the summed
// error at the chosen step.  Returns max immediately for a degenerate
// bracket.
double NNet::CalStep(double * in,double min ,double max,double & result)
{
if(max-min<0.01)return max;
double step=0;
double t1=0,t2=0;
double r1=0,r2=0;
// Initial interior probe points and their total errors.
t1=min+0.382*(max-min);
t2=min+0.618*(max-min);
for(int i=0;i<samples;i++)
{
r1+=Search(in,t1,i);
r2+=Search(in,t2,i);
}
while(1)
{
if(r1>r2)
{
// Minimum lies in [t1, max]: drop the left part, reuse t2 as new t1.
if(max-t1<0.01)
{
step=t2;
result=r2;
break;
}
min=t1;
t1=t2;
r1=r2;
t2=min+0.618*(max-min);
r2=0;
for(int i=0;i<samples;i++)
{
r2+=Search(in,t2,i);
}
}
else
{
// Minimum lies in [min, t2]: drop the right part, reuse t1 as new t2.
if(t2-min<0.01)
{
step=t1;
result=r1;
break;
}
max=t2;
t2=t1;
r2=r1;
t1=min+0.382*(max-min);
r1=0;
for(int i=0;i<samples;i++)
{
r1+=Search(in,t1,i);
}
}
}
// cout<<"r "<<r1/122.0<<" "<<r2/122.0<<endl;
return step;
}
// Update the conjugate search direction of both trainable layers
// (conjugate-gradient training mode).
//   index  : iterations since the last restart (nCon in Train()).
//   sample : sample count, forwarded to NetLayer::CalConjuct.
// Every `freedom` (= weight count) iterations the direction resets to
// steepest descent (belta = 0); otherwise belta is the ratio of squared
// gradient norms |g|^2 / |g_prev|^2 (Fletcher-Reeves form).
bool NNet::CalConject(int index,int sample)
{
    if(index%freedom==0)
    {
        layer->CalConjuct(0,sample);
        OutputLayer->CalConjuct(0,sample);
        return true;
    }
    double belta=0;
    double g1=0,g0=0; // g0: |current gradient|^2, g1: |previous gradient|^2
    int i=0;
    int count=layer->Units;
    for(i=0;i<count;i++)
    {
        for(int j=0;j<nInput;j++)
        {
            double tg=layer->pNode[i].gradient[j];
            g0+=tg*tg;
            tg=layer->pNode[i].lastgradient[j];
            g1+=tg*tg;
        }
    }
    count=OutputLayer->Units;
    int hide=layer->Units; // output layer has one weight per hidden unit
    for(i=0;i<count;i++)
    {
        for(int j=0;j<hide;j++)
        {
            double tg=OutputLayer->pNode[i].gradient[j];
            g0+=tg*tg;
            tg=OutputLayer->pNode[i].lastgradient[j];
            g1+=tg*tg;
        }
    }
    // BUGFIX(review): when the previous gradient vanished, g0/g1 divided
    // by zero and poisoned the direction (and later the weights) with
    // inf/NaN.  Fall back to a steepest-descent restart instead.
    belta = (g1 > 0) ? g0/g1 : 0;
    // cout<<"conj "<<belta<<endl;
    layer->CalConjuct(belta,sample);
    OutputLayer->CalConjuct(belta,sample);
    // cout<<endl;
    return true;
}
//***********************************************************************
// Start with no per-class networks; Training() allocates one NNet per
// class on demand.
DNANNet::DNANNet()
{
    net[0] = net[1] = net[2] = net[3] = NULL;
}
// Destructor: delegate to Clear(), which deletes and nulls all four
// per-class networks.
DNANNet::~DNANNet()
{
Clear();
}
// Train one binary (one-vs-rest) network per class.
//   ppSample : n samples with c features each.
//   pClass   : class label per sample; assumed in [0,3] -- TODO confirm
//              with callers (out-of-range labels would index out4 OOB).
//   n, c     : sample count and feature count.
// Returns TRUE (allocation failures are not checked, matching the
// original code).
BOOL DNANNet::Training(double **ppSample,int* pClass,int n,int c)
{
    // fstream files;
    // files.open("input.txt",ios::out);
    int i=0;
    for(i=0;i<4;i++)
    {
        if(net[i])
        {
            delete net[i];
        }
        net[i]=new NNet;
        // BUGFIX(review): was InitNet(22,2,1) -- hard-coded to the
        // original dataset's 22 features even though the caller passes the
        // feature count c.  Use c so the input layer matches the data.
        net[i]->InitNet(c,2,1);
    }
    // Flatten the samples into one contiguous row-major buffer.
    double * in=new double[n*c];
    double ** out4=Alloc2D(4,n,0);
    for(i=0;i<n;i++)
    {
        for(int j=0;j<c;j++)
        {
            in[i*c+j]=ppSample[i][j];
            // files<<in[i*c+j]<<" ";
        }
        // files<<endl;
    }
    // One-vs-rest targets: out4[k][i] == 1 iff sample i belongs to class k.
    for(i=0;i<n;i++)
    {
        out4[pClass[i]][i]=1;
    }
    double t=0.0;
    for(i=0;i<4;i++)
    {
        int funtype[2]={logsig,purelin};
        net[i]->SetTranFun(funtype);
        net[i]->SetTrainMode(traincgp);
        // BUGFIX(review): was hard-coded to 62 samples (the original
        // dataset size), reading past the buffers for n < 62 and ignoring
        // extra samples for n > 62.  Pass the real count n.
        net[i]->Train(in,out4[i],n,5000,0.005,0,10);
        t=(double)(i+1)/4.0;
        ReportStatus(&t); // progress callback: fraction of networks done
    }
    Delete2D(out4,4);
    delete []in;
    return TRUE;
}
// Classify each of the n test samples (c features each) as the class
// whose one-vs-rest network produces the largest output, writing the
// winning class index (0..3) into pClass[test].  Requires Training() to
// have been called first.  Returns TRUE.
BOOL DNANNet::Classify(double **ppTestSample,int* pClass,int n,int c)
{
    // fstream files;
    // files.open("output.txt",ios::app);
    // Flatten the test samples into one contiguous row-major buffer.
    double * in=new double[n*c];
    int i=0;
    for(i=0;i<n;i++)
    {
        for(int j=0;j<c;j++)
        {
            in[i*c+j]=ppTestSample[i][j];
        }
    }
    for(int test=0;test<n;test++)
    {
        int maxpos=-1;
        double max=-9999999;
        for(int k=0;k<4;k++) // renamed from `i`, which shadowed the outer i
        {
            double re;
            net[k]->Sim(in+test*c,&re);
            // files<<re<<" ";
            if(re>max)
            {
                max=re;
                maxpos=k;
            }
        }
        // files<<"re "<<maxpos<<endl;
        pClass[test]=maxpos;
    }
    // BUGFIX(review): `in` was leaked on every call; free it before return.
    delete []in;
    return TRUE;
}
// Dispose of all per-class networks and leave the slots NULL so a later
// Training() call starts clean.  (delete on a NULL pointer is a no-op,
// so no guard is needed.)
void DNANNet::Clear()
{
    for(int k = 0; k < 4; ++k)
    {
        delete net[k];
        net[k] = NULL;
    }
}
// end of bp.cpp (website UI residue removed from the original paste)