// newnn2.cpp -- simple feed-forward neural network with momentum
// backpropagation.  (Web-page header removed from the scraped source.)
// Direction flag for Media::Push: register a neuron on the prior
// (input) side or the next (output) side of a connection matrix.
enum Flag{fPrior,fNext};
// Role of a neuron within the network topology.
enum NType{inputLayer,hiddenLayer,outputLayer};
// Constants of the scaled-tanh activation phi(v) = a*tanh(b*v).
// NOTE(review): single-letter macros 'a' and 'b' silently clobber any
// identifier of the same name later in this file -- never declare a
// local called a or b below.
#define a 1.7159
#define b 0.666666
#include "stdlib.h"
#include "malloc.h"
#include "iostream.h"
#include "math.h"
unsigned long z;   // presumably the drand() RNG state -- TODO confirm (drand is defined elsewhere)
class Neural;      // forward declarations: Media and Neural reference each other
class Media;
double drand();    // pseudo-random double, defined outside this view; used as drand()-0.5 for init
// Media models the weight matrix (plus biases) connecting two adjacent
// neuron layers.  Weights and biases are double-buffered: index
// currentState holds the values used during the current iteration,
// 1-currentState receives the updated values, and UpdatePerIteration()
// flips the buffers.
class Media{
private:
int numberOfPriorNeural;   // capacity of the prior (input-side) layer
int numberOfNextNeural;    // capacity of the next (output-side) layer
int cursorOfPrior;         // how many prior neurons have been Push()ed
int cursorOfNext;          // how many next neurons have been Push()ed
int currentWeightState;    // written in the ctor, never read in the visible code
int currentState;          // index (0/1) of the active weight/bias buffer
double ** weightChange;    // momentum term: last applied delta per weight
double ** weightArray[2];  // double-buffered weights, [state][prior][next]
double * bias[2];          // double-buffered per-next-neuron biases
double * biasChange;       // momentum term for the biases
bool init;                 // true once InitWeight() has run
Neural ** priorList;       // neurons feeding this connection
Neural ** nextList;        // neurons fed by this connection
int numberOfIteration;     // training-iteration counter (drives the LR schedule)
double learningRate;
public:
Media(const int nPL,const int nNL);
~Media();
// Register a neuron on the side selected by f; false when that side is full.
bool Push(Neural * nPointer,int layerID,Flag f);
// One-shot randomization of both weight/bias buffers in [-0.5,0.5).
bool InitWeight();
// Backpropagated error term for prior-layer neuron IDInLayer.
double GetDerivative(int IDInLayer);
// Momentum gradient step for next-layer neuron IDInLayer (writes the
// inactive buffer).
bool WeightChange(int IDInLayer,double localGradient);
// Weighted sum of prior-layer outputs plus bias for neuron IDInLayer.
double ForwardCompute(int IDInLayer);
// Debug dump of both weight buffers, biases and momentum deltas.
void WeightPrint();
int GetCursorOfPrior();
int GetCursorOfNext();
int GetNumberOfPriorNeural();
int GetNumberOfNextNeural();
// Flip the weight buffers and advance the learning-rate schedule.
bool UpdatePerIteration();
};
// A single neuron.  Input neurons simply hold an externally supplied
// value; hidden/output neurons compute a*tanh(b*v) of their weighted
// net input (delegated to Media) and carry a local gradient for
// backpropagation.
class Neural{
private:
NType neuralType;          // inputLayer / hiddenLayer / outputLayer
Media * priorMedia;        // connection feeding this neuron -- presumably NULL for inputs, confirm
Media * nextMedia;         // connection this neuron feeds -- presumably NULL for outputs, confirm
int IDInLayer;             // index of this neuron within its layer
bool gradientUpdate;       // true once localGradient is fresh for this pass
double value;              // last computed output (activation)
double localGradient;      // backprop error term (delta)
int numberOfNeuralInPriorLayer;   // never read or written in the visible code
int numberOfNeuralInNextLayer;    // never read or written in the visible code
public:
Neural(NType nt,int ID,Media * priorM,Media * nextM);
~Neural();
// Recompute value: pass-through for inputs, a*tanh(b*v) otherwise.
double ForwardCompute();
double GetValue();
double GetGradient();
// Backprop step for hidden neurons (uses downstream gradients).
bool computeNewGradient();
// Backprop step for output neurons (uses the desired target value).
bool computeNewGradient(double desiredValue);
// Apply the stored gradient to the incoming weights via priorMedia.
bool WeightAdjust();
// Store an external value; only valid for input-layer neurons.
bool SetInputNeuralValues(double inputValue);
};
// Apply one momentum-SGD update for every weight feeding next-layer
// neuron IDInLayer, plus its bias.  New values land in the inactive
// buffer (1-currentState) and take effect only after
// UpdatePerIteration() flips the buffers.
//   delta = learningRate * localGradient * input + momentum * previous delta
// (Removed stale commented-out debug prints that still indexed the old
// single-buffer weightArray[i][IDInLayer] layout -- they no longer
// matched the code and would not even compile if re-enabled.)
bool Media::WeightChange(int IDInLayer,double localGradient){
    const double momentum=0.9;  // classical momentum coefficient (was a magic number)
    for(int i=0;i<cursorOfPrior;i++){
        weightChange[i][IDInLayer]=learningRate*localGradient*(priorList[i]->GetValue())
            +momentum*weightChange[i][IDInLayer];
        weightArray[1-currentState][i][IDInLayer]=weightArray[currentState][i][IDInLayer]
            +weightChange[i][IDInLayer];
    }
    // The bias behaves like a weight whose input is the constant 1.
    biasChange[IDInLayer]=learningRate*localGradient*1
        +momentum*biasChange[IDInLayer];
    bias[1-currentState][IDInLayer]=bias[currentState][IDInLayer]+biasChange[IDInLayer];
    return true;
}
// Promote the freshly written weight buffer to active and advance the
// learning-rate schedule.  Call once per training iteration.
bool Media::UpdatePerIteration(){
currentState=1-currentState;
numberOfIteration++;
// Schedule: constant 0.01 for the first 2000 iterations, then divided
// by the iteration count on EVERY subsequent call.
// NOTE(review): the repeated division makes the rate collapse roughly
// factorially fast (0.01/2000/2001/...); the commented-out expression
// suggests a 1/(1+20n) decay was intended -- confirm before relying on
// late-stage training behavior.
if(numberOfIteration<2000) learningRate=0.01;//0.2/(1+20*numberOfIteration);
else learningRate/=numberOfIteration;
return true;
}
// Number of prior-layer neurons registered via Push() so far.
int Media::GetCursorOfPrior(){
return cursorOfPrior;
}
// Number of next-layer neurons registered via Push() so far.
int Media::GetCursorOfNext(){
return cursorOfNext;
}
// Capacity of the prior layer (fixed at construction).
int Media::GetNumberOfPriorNeural(){
return numberOfPriorNeural;
}
// Capacity of the next layer (fixed at construction).
int Media::GetNumberOfNextNeural(){
return numberOfNextNeural;
}
// Net input v for next-layer neuron IDInLayer: dot product of the
// active weight column with the prior layer's outputs, plus the bias.
// (Summation order matches the original: weights first, bias last.)
double Media::ForwardCompute(int IDInLayer){
    double netInput=0;
    for(int k=0;k<numberOfPriorNeural;k++)
        netInput+=weightArray[currentState][k][IDInLayer]*priorList[k]->GetValue();
    netInput+=bias[currentState][IDInLayer]*1;
    return netInput;
}
// Debug dump: for each weight, "active & shadow"; then the two bias
// buffers separated by " | "; then (after a "change" header) the
// momentum deltas for weights and biases.
void Media::WeightPrint(){
    for(int r=0;r<cursorOfPrior;r++){
        for(int c=0;c<cursorOfNext;c++){
            cout<<(weightArray[currentState][r][c])<<" & ";
            cout<<(weightArray[1-currentState][r][c])<<" ";
        }
        cout<<endl;
    }
    for(int c=0;c<cursorOfNext;c++) cout<<bias[currentState][c]<<" | "<<bias[1-currentState][c];
    cout<<endl;
    cout<<"change"<<endl;
    for(int r=0;r<cursorOfPrior;r++){
        for(int c=0;c<cursorOfNext;c++) cout<<weightChange[r][c]<<" ";
        cout<<endl;
    }
    for(int c=0;c<cursorOfNext;c++) cout<<biasChange[c]<<" ";
    cout<<endl;
}
// Build a connection object for nPL prior neurons and nNL next
// neurons.  Both weight buffers, both bias buffers, the momentum
// arrays and the registration lists are malloc'ed here; the numeric
// contents stay uninitialized until InitWeight() runs.
Media::Media(const int nPL,const int nNL){
    numberOfPriorNeural=nPL;
    numberOfNextNeural=nNL;
    cursorOfPrior=0;
    cursorOfNext=0;
    currentState=0;          // buffer 0 starts as the active one
    currentWeightState=0;    // written here, never read in the visible code
    numberOfIteration=1;
    learningRate=0.2/(1+20*numberOfIteration);
    init=false;
    // Two weight planes plus one momentum plane, each nPL x nNL.
    weightChange=(double **)malloc(nPL*sizeof(double *));
    weightArray[0]=(double **)malloc(nPL*sizeof(double *));
    weightArray[1]=(double **)malloc(nPL*sizeof(double *));
    for(int row=0;row<nPL;row++){
        weightChange[row]=(double *)malloc(nNL*sizeof(double));
        weightArray[0][row]=(double *)malloc(nNL*sizeof(double));
        weightArray[1][row]=(double *)malloc(nNL*sizeof(double));
    }
    // Per-next-neuron biases (double-buffered) and their momentum terms.
    bias[0]=(double *)malloc(nNL*sizeof(double));
    bias[1]=(double *)malloc(nNL*sizeof(double));
    biasChange=(double *)malloc(nNL*sizeof(double));
    priorList=(Neural **)malloc(nPL*sizeof(Neural *));
    nextList=(Neural **)malloc(nNL*sizeof(Neural *));
}
// Register nPointer on the side of the connection selected by f.
// Returns false when that side is already full (or f is invalid).
// Fix: the original assigned the slot index to layerID, but layerID is
// passed by value, so the store never reached the caller -- a dead
// store (the author probably meant "int &layerID").  The parameter is
// kept unused for interface compatibility.
bool Media::Push(Neural * nPointer,int layerID,Flag f){
    (void)layerID;  // see note above: intentionally unused
    if(f==fPrior){
        if(cursorOfPrior>=numberOfPriorNeural) return false;
        priorList[cursorOfPrior]=nPointer;
        cursorOfPrior++;
        return true;
    }
    if(f==fNext){
        if(cursorOfNext>=numberOfNextNeural) return false;
        nextList[cursorOfNext]=nPointer;
        cursorOfNext++;
        return true;
    }
    return false;
}
// One-shot initialization: randomize both weight buffers and both bias
// buffers to [-0.5,0.5) and zero the momentum terms.  Returns false if
// already initialized.  Only cursorOfPrior x cursorOfNext cells are
// filled, so call after all neurons have been Push()ed.
// (drand() call order is significant for reproducibility and matches
// the original exactly.)
bool Media::InitWeight(){
    cout<<"init weight"<<endl;
    if(init==true) return false;
    for(int r=0;r<cursorOfPrior;r++){
        for(int c=0;c<cursorOfNext;c++){
            weightChange[r][c]=0.0;
            weightArray[currentState][r][c]=drand()-0.5;
            weightArray[1-currentState][r][c]=drand()-0.5;
        }
        cout<<"init over"<<endl;
    }
    for(int c=0;c<cursorOfNext;c++){
        bias[currentState][c]=drand()-0.5;
        bias[1-currentState][c]=drand()-0.5;
        biasChange[c]=0.0;
        cout<<bias[currentState][c]<<" and "<<bias[1-currentState][c]<<" and "<<biasChange[c];
    }
    cout<<"init OK"<<endl;
    init=true;
    return true;
}
// Backpropagated error for prior-layer neuron IDInLayer: sum over the
// next layer of (local gradient x connecting weight), read from the
// active weight buffer.
double Media::GetDerivative(int IDInLayer){
    double acc=0;
    for(int k=0;k<cursorOfNext;k++)
        acc+=(nextList[k]->GetGradient())*weightArray[currentState][IDInLayer][k];
    return acc;
}
// Construct a neuron of type nt with index ID inside its layer, wired
// between priorM (incoming connection) and nextM (outgoing connection).
// Fix: value, localGradient and the two layer-size counters were left
// uninitialized, so GetValue()/GetGradient() could return garbage
// before the first forward/backward pass; zero them here.
Neural::Neural(NType nt,int ID,Media * priorM,Media * nextM){
    IDInLayer=ID;
    neuralType=nt;
    priorMedia=priorM;
    nextMedia=nextM;
    gradientUpdate=false;
    value=0.0;
    localGradient=0.0;
    numberOfNeuralInPriorLayer=0;
    numberOfNeuralInNextLayer=0;
}
// Last computed output (activation) of this neuron.
double Neural::GetValue(){
return value;
}
// Last computed backprop error term (local gradient) of this neuron.
double Neural::GetGradient(){
return localGradient;
}
// Backward pass for a hidden neuron.  local gradient = (sum of
// downstream gradients weighted by the outgoing weights) * phi'(v),
// where phi(v)=a*tanh(b*v) gives phi'(v) = (b/a)*(a-value)*(a+value).
// Returns false (and computes nothing) for non-hidden neurons.
bool Neural::computeNewGradient(){
    if(neuralType!=hiddenLayer) return false;
    double downstream=nextMedia->GetDerivative(IDInLayer);
    localGradient=downstream*(a-value)*(a+value)*b/a;
    gradientUpdate=true;
    return true;
}
// Backward pass for an output neuron: the error signal is simply
// (desiredValue - value), multiplied by the activation derivative
// phi'(v) = (b/a)*(a-value)*(a+value) for phi(v)=a*tanh(b*v).
// Returns false (and computes nothing) for non-output neurons.
bool Neural::computeNewGradient(double desiredValue){
    if(neuralType!=outputLayer) return false;
    double outputError=desiredValue-value;
    localGradient=outputError*(a-value)*(a+value)*b/a;
    gradientUpdate=true;
    return true;
}
// Feed an external value into an input-layer neuron.  Any other neuron
// type rejects the call and is left untouched.
bool Neural::SetInputNeuralValues(double inputValue){
    if(neuralType==inputLayer){
        value=inputValue;
        return true;
    }
    return false;
}
// Push this neuron's freshly computed local gradient into the incoming
// weight matrix.  Requires a prior computeNewGradient() call.
// Fix: the original ended with "gradientUpdate=true;" -- a dead store,
// since the guard above guarantees it is already true.  Removed.
// NOTE(review): the author may have intended gradientUpdate=false here
// (consume the gradient so a stale one cannot be re-applied); that
// would change behavior for repeated calls, so it is NOT done -- confirm
// the intended contract with the training loop.
bool Neural::WeightAdjust(){
    if(gradientUpdate==false) return false;
    priorMedia->WeightChange(IDInLayer,localGradient);
    return true;
}
// Forward pass.  Input neurons echo their stored value; other neurons
// fetch the net input v from priorMedia and apply the scaled tanh
//   phi(v) = a*(e^{2bv}-1)/(e^{2bv}+1) = a*tanh(b*v).
// Fixes: exp(2*b*value) was evaluated twice -- hoisted into a local;
// a dead "value=0;" before the assignment was removed.
double Neural::ForwardCompute(){
    if(neuralType==inputLayer) return value;
    value=priorMedia->ForwardCompute(IDInLayer);
    const double e2bv=exp(2*b*value);
    value=a*(e2bv-1.0)/(e2bv+1.0);
    // NOTE(review): phi ranges over (-a,a) with a=1.7159, yet the code
    // clamps the output to [-1,1], flattening |output|>1.  Preserved
    // as-is; confirm this saturation is intentional.
    if(value>1) value=1;
    if(value<-1) value=-1;
    return value;
}
class SourceData{
private:
int numberOfExample;
int numberOfInputNeural;
int numberOfOutputNeural;
double ** inputVector;
// NOTE(review): the scraped source is truncated here -- the remainder
// of class SourceData (and anything after it) was lost to web-page
// chrome during extraction, so the class body above is incomplete.