// bp.h — BP (back-propagation) neural network declarations,
// from the "DNA classification" project (C/C++ header, ~144 lines).
#ifndef BP_H
#define BP_H
#include<string.h>
//#include<iostream.h>
//#include<fstream.h>
#include<math.h>
#include<stdlib.h>
#include<time.h>
#include"util.h"
#include "DNAClassifyBaseClass.h"
#define purelin 0
#define tansig  1
#define logsig  2

#define traingd   0
#define traincgp 1
// Transfer (activation) functions — selected via the purelin/tansig/logsig macros above.
double logsigf(double in);    // log-sigmoid: squashes to (0, 1)
double tansigf(double in);    // tan-sigmoid: squashes to (-1, 1)
double purelinf(double in);   // pure linear: identity
// Derivatives of the transfer functions, used for back-propagation.
double glogsigf(double in);
double gtansigf(double in);
double gpurelinf(double in);

class NetLayer;
class NNet;

// A single neuron. Holds its input weights, output value, error term,
// and the gradient / conjugate-direction buffers used by the two
// training modes (traingd = gradient descent, traincgp = conjugate gradient).
class NetNode
{
	friend class NetLayer;
	friend class NNet;
private:
	double   output;        // current activation value
	double   preoutput;     // pre-activation (weighted sum) value
	double   error;         // back-propagated error term
	double * pweight;       // weight array, one per input (allocated in InitNetNode)
	int      inputs;        // number of inputs feeding this node
	double * gradient;      // per-sample gradient buffer
	double * lastgradient;  // gradient from the previous iteration
	double * cong;          // conjugate search direction
	double * lastcong;      // previous conjugate direction
//	double * step;
	double   delta;
//	double   dstep;
	int      mode;          // training mode (traingd / traincgp)
	double   searchres;     // result of the line search along the current direction
public:
	void InitNetNode(int input);
	// Zero-initialize every member explicitly.
	// NOTE(review): the original used memset(this,0,sizeof(NetNode)), which is
	// undefined behavior on a non-trivially-copyable class (user-declared
	// destructor) and would silently corrupt a vtable if the class ever gains
	// virtual functions. The member-initializer list below produces the same
	// all-zero observable state safely.
	NetNode()
		: output(0.0), preoutput(0.0), error(0.0), pweight(0),
		  inputs(0), gradient(0), lastgradient(0),
		  cong(0), lastcong(0),
		  delta(0.0), mode(0), searchres(0.0)
	{
	}
	~NetNode();
	// Compute this node's output from the previous layer using transfer function 'funtype'.
	double Output(NetLayer * input,int funtype);
	void InitWeight();
	bool AdjustWeithts(double learn,int sample);  // (sic) — name kept for existing callers
	bool CalGradient(NetLayer * pre,NetLayer * next,int index,int funtype,int sample);
	bool CalConjuct(double belta,int sample);
	void Reset();
	void SetMode(int nMode);
	void SearchStep(NetLayer * input,int funtype,double learn);
//	bool CalStep(NetLayer * pre,NetLayer * next,int index,int funtype,int sample);
};
// One layer of the network: an array of NetNode plus links to the
// neighboring layers, forming a doubly-linked list of layers.
class NetLayer
{
	friend class NetNode;
	friend class NNet;
private:
	int Units;              // number of nodes in this layer
	NetNode * pNode;        // node array of length Units
	int Inputs;             // inputs per node (= size of previous layer)
	NetLayer * pre,*next;   // neighboring layers (null at the ends)
	int  nType;             // transfer-function type (purelin/tansig/logsig)
public:
	NetLayer(int nodes,int input);
	~NetLayer();
	// Wire this layer between p1 (previous) and p2 (next).
	void SetConnect(NetLayer * p1,NetLayer * p2);
	void InitWeight();
	bool Reset();
	// Forward-propagate: compute every node's output from the previous layer.
	bool SimLayer();
	// Load sample 'index' from 'in' into this (input) layer's outputs.
    bool SetInputLayer(double * in,int index);
	bool AdjustWeithts(double learn,int sample);   // (sic) — spelling kept for existing callers
	bool CalGradient(int sample);
	bool CalConjuct(double belta,int sample);
	void SetTranFun(int funtype);
	void SetTrainMode(int mode);
	bool Search(double learn);
//    void CalStep(int sample);
};
// The full feed-forward network: a chain of NetLayer objects plus the
// training loop (gradient descent or conjugate gradient, per nMode).
class NNet
{
private:
	NetLayer * layer;         // head of the layer chain
	NetLayer   * InputLayer;  // first (input) layer
	NetLayer * OutputLayer;   // last (output) layer
    double     Alpha;         // momentum factor
//    double     Eta;           // - learning rate
    double    *goal;          // training target values — NOTE(review): original comment
                              // said "gain of sigmoid function", which does not match
                              // the name or its use in Train(); verify against the .cpp
    double    *Error;         // per-output error accumulator — TODO confirm against .cpp
	int        nInput;        // number of network inputs
	int        nOutput;       // number of network outputs
	double     fError;        // total net error
	int        nLayers;       // number of layers
	int        nMode;         // training mode (traingd / traincgp)
	int        samples;       // number of training samples
	int        freedom;       // degrees of freedom (weight count) — TODO confirm
protected:
	// Forward pass for training sample 'index'.
	void Sim(int index);
public:
	NNet(int inputs,int hides,int outputs);
	NNet();
	~NNet();
	void InitNet(int inputs,int hides,int outputs);

	void CalError(int index);
	void AdjustWeights(double learn,int nSample);
	// Train on 'nSample' samples for up to 'nEpoch' epochs, stopping when the
	// mean squared error drops below 'minmse'; learning rate searched in [minl, maxl].
	void Train(double * in,double * goal,int nSample,int nEpoch,
		double minmse,double minl,double maxl);
	void SetTranFun(int * funtype);   // per-layer transfer-function types
	bool CalConject(int index,int sample);
	void SetTrainMode(int mode);
	double CalStep(double * in,double min,double max,double & result);
	double Search(double * in,double learn,int index);
	bool   CheckConj();
	// Run a trained net: map 'input' to 'output'.
	void Sim(double * input,double * output);
	bool CheckStep();
};

// DNA classifier built on the BP network: one NNet per class (4 classes),
// plugged into the project's common classifier interface.
class DNANNet:public CDNAClassifyBaseClass
{
private:
	NNet  *net[4];   // one network per DNA class — presumably one-vs-rest; confirm in .cpp
public:
	DNANNet();
	~DNANNet();
public:
	// Train on n samples of c features; pClass holds each sample's class label.
	BOOL Training(double **ppSample,int* pClass,int n,int c);
	// Classify n test samples of c features; writes predicted labels into pClass.
	BOOL Classify(double **ppTestSample,int* pClass,int n,int c);

	CString GetName(){return "BP";}

	void Clear();
};
#endif 
