
neuronetwork.cpp

An example I wrote some time ago of a neural network learning function approximation and classification (C++).

#include "stdafx.h"
#include <math.h>
#include <float.h>
#include <stdlib.h>	// rand(), srand()
#include <time.h>	// time()
#include <string.h>	// memcpy()
#include "NeuroNetwork.h"


//class CNeuron
CNeuron::CNeuron(CNeuroNet* pNet, int iLayerNo, int iNo, double* pInitWeights, double dInitThreshold)
{
	Initialize(pNet, iLayerNo, iNo, pInitWeights, dInitThreshold);
}

CNeuron::CNeuron()
{
	m_iLayerNo = 0;
	m_iNo = 0;

	m_iWeightNum = 1;
	m_iFunType = NEURONET_FUNCTYPE_LINEAR;
	m_dThreshold = 0;
	m_pWeights = NULL;

	m_dInputSum = 0;
	m_dOutput = 0;
	m_dError = 0;

	m_pNet = NULL;
}

void CNeuron::Initialize(CNeuroNet* pNet, int iLayerNo, int iNo, double* pInitWeights, double dInitThreshold)
{
	m_iLayerNo = iLayerNo;
	m_iNo = iNo;

	if (iLayerNo == 0)
	{
		m_iWeightNum = 1;
		m_iFunType = NEURONET_FUNCTYPE_LINEAR;
		m_dThreshold = 0;
		m_pWeights = NULL;
	}
	else
	{
		m_iWeightNum = pNet->m_pLayNodNum[iLayerNo-1];

		if (iLayerNo == pNet->m_iLayNum-1)
		{
			m_iFunType = pNet->m_iOutLayFun;
		}
		else
		{
			m_iFunType = pNet->m_iMidLayFun;
		}

		m_pWeights = new double [m_iWeightNum];
		if (pInitWeights != NULL)
		{
			memcpy(m_pWeights, pInitWeights, m_iWeightNum*sizeof(double));
		}
		else
		{
			for (int i = 0; i < m_iWeightNum; i++)
			{
				m_pWeights[i] = 0.5;
			}
		}

		m_dThreshold = dInitThreshold;	// passing 0 simply leaves the threshold at 0
	}

	m_dInputSum = 0;
	m_dOutput = 0;
	m_dError = 0;

	m_pNet = pNet;
}

CNeuron::~CNeuron()
{
	delete [] m_pWeights;
}

void CNeuron::CalcInputSum()
{
	m_dInputSum = 0;
	
	for (int i = 0; i<m_iWeightNum; i++)
	{
		m_dInputSum += m_pNet->m_Layers[m_iLayerNo-1][i].m_dOutput * m_pWeights[i];
	}
	m_dInputSum -= m_dThreshold;
}

void CNeuron::CalcOutput()
{
	switch (m_iFunType)
	{
	case NEURONET_FUNCTYPE_LINEAR:
		m_dOutput = m_dInputSum;
		break;
	case NEURONET_FUNCTYPE_SIGMOID:
		m_dOutput = 1/(1+exp(-m_dInputSum));
		break;
	case NEURONET_FUNCTYPE_WAVELET:
		// wavelet activation not implemented; m_dOutput is left unchanged
		break;
	default:
		m_dOutput = m_dInputSum;
		break;		
	}
}
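
// Side note on the sigmoid case above: its derivative, used later in
// AdjustWeights() as exp(-s)/(1+exp(-s))^2, is algebraically equal to
// y*(1-y) with y = 1/(1+exp(-s)). A minimal standalone check of that
// identity (DemoSigmoidDerivative is a hypothetical helper, compiled
// only when NEURONET_DEMO is defined; the network itself never calls it):
#ifdef NEURONET_DEMO
#include <stdio.h>
static void DemoSigmoidDerivative()
{
	for (double s = -4; s <= 4; s += 2)
	{
		double y  = 1/(1+exp(-s));
		double d1 = exp(-s)/pow(1+exp(-s), 2);	// form used in AdjustWeights()
		double d2 = y*(1-y);					// equivalent form reusing the output
		printf("s=%5.1f  d1=%.6f  d2=%.6f\n", s, d1, d2);
	}
}
#endif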

BOOL CNeuron::AdjustWeights()
{
	double dDeriv = 0;
	double dDelta = 0;

	switch (m_iFunType)
	{
	case NEURONET_FUNCTYPE_LINEAR:
		dDeriv = 1;
		break;
	case NEURONET_FUNCTYPE_SIGMOID:
		dDeriv = exp(-m_dInputSum)/pow((1+exp(-m_dInputSum)),2);
		break;
	case NEURONET_FUNCTYPE_WAVELET:
		// wavelet derivative not implemented; dDeriv stays 0, so no update occurs
		break;
	default:
		dDeriv = 1;
		break;
	}

	if (m_iLayerNo == m_pNet->m_iLayNum-1)
	{
		m_dError = m_pNet->m_pExpOutput[m_iNo] - m_dOutput;
		if (!_finite(m_dError))
		{
			MessageBox(NULL,"Unsuitable initial values made it impossible to continue training the network!","",MB_OK|MB_ICONERROR|MB_TOPMOST);
			return FALSE;
		}
	}
	else
	{
		m_dError = 0;
		int iForwardLayNodNum = m_pNet->m_pLayNodNum[m_iLayerNo+1];
		for (int i = 0; i < iForwardLayNodNum; i++)
		{
			m_dError += m_pNet->m_Layers[m_iLayerNo+1][i].m_dError * m_pNet->m_Layers[m_iLayerNo+1][i].m_pWeights[m_iNo];
			if (!_finite(m_dError))
			{
				MessageBox(NULL,"Unsuitable initial values made it impossible to continue training the network!","",MB_OK|MB_ICONERROR|MB_TOPMOST);
				return FALSE;
			}
		}
	}

	for (int i = 0; i < m_iWeightNum; i++)
	{
		dDelta = m_pNet->m_dDelta * m_dError * dDeriv * m_pNet->m_Layers[m_iLayerNo-1][i].m_dOutput;
		m_pWeights[i] += dDelta;
	}

	dDelta = m_pNet->m_dDelta * m_dError * dDeriv * (-1);
	m_dThreshold += dDelta;

	return TRUE;
}
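
// The loops above implement one gradient-descent step of the delta rule:
//     w_i   += eta * e * f'(s) * x_i
//     theta += eta * e * f'(s) * (-1)
// where eta is m_dDelta, e is the backpropagated error, and the threshold
// acts as a weight on a constant input of -1 (see CalcInputSum()). A minimal
// sketch of the same step for a single linear neuron (DemoDeltaRule is a
// hypothetical helper, compiled only when NEURONET_DEMO is defined):
#ifdef NEURONET_DEMO
static void DemoDeltaRule()
{
	double x[2] = {1.0, 2.0};					// inputs
	double w[2] = {0.5, 0.5};					// weights
	double theta = 0.0, eta = 0.1, target = 2.0;

	double s = x[0]*w[0] + x[1]*w[1] - theta;	// same form as CalcInputSum()
	double e = target - s;						// linear neuron: output == s, f'(s) == 1
	for (int i = 0; i < 2; i++)
		w[i] += eta * e * x[i];
	theta += eta * e * (-1);					// error shrinks on the next pass
}
#endif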

/////////////////////////////////////////////////////////////////////////////
//class CNeuronNet
///////////////////////////////////////////////////////


CNeuroNet::CNeuroNet()		//this default constructor is for subclasses only.
{
	m_pLayNodNum = NULL;
	m_iLayNum = 0;

	m_iMidLayFun = 0;
	m_iOutLayFun = 0;
	m_dDelta = 0;

	m_pInput = NULL;
	m_pExpOutput = NULL;
	m_Layers = NULL;
}

CNeuroNet::CNeuroNet(int* pLayNodNum, int iLayNum, int iMidLayFun, int iOutLayFun, double dDelta)
{
	int l, i;
	srand( (unsigned)time( NULL ) );

	m_pLayNodNum = new int [iLayNum];
	memcpy(m_pLayNodNum, pLayNodNum, sizeof(int)*iLayNum);
	m_iLayNum = iLayNum;

	m_iMidLayFun = iMidLayFun;
	m_iOutLayFun = iOutLayFun;
	m_dDelta = dDelta;

	m_pInput = new double [pLayNodNum[0]];
	m_pExpOutput = new double [pLayNodNum[iLayNum-1]];

	m_Layers = new CNeuron* [iLayNum];
//	m_Layers[0] = NULL;
	for (l = 0; l < iLayNum; l++)
	{
		int iNodNum = m_pLayNodNum[l];
		m_Layers[l] = new CNeuron [iNodNum];

		for (i = 0; i < iNodNum; i++)
		{
			if (l == 0)
				m_Layers[l][i].Initialize(this, l, i, NULL, 0);
			else
			{
				int iWgtNum = pLayNodNum[l-1];
				double* pInitWeights = new double [iWgtNum];
				GenerateInitWeights(pInitWeights, iWgtNum);
				m_Layers[l][i].Initialize(this, l, i, pInitWeights, rand()/double(RAND_MAX));	// random threshold in [0, 1]
				delete [] pInitWeights;
			}
		}
	}	
}

CNeuroNet::~CNeuroNet()
{
	for (int l = 0; l < m_iLayNum; l++)
	{
		delete [] m_Layers[l];
	}
	delete [] m_Layers;

	delete [] m_pLayNodNum;

	delete [] m_pInput;

	delete [] m_pExpOutput;
}

void CNeuroNet::GenerateInitWeights(double* pWeights, int iWeightNum)
{
	for (int i = 0; i < iWeightNum; i++)
	{
		pWeights[i] = rand()/double(RAND_MAX) - 0.5;	// uniform in [-0.5, 0.5]
	}
}

void CNeuroNet::SetInput(double* pInputVector)
{
	for (int i = 0; i < m_pLayNodNum[0]; i++)
	{
		m_pInput[i] = pInputVector[i];
		m_Layers[0][i].m_dOutput = m_pInput[i];
	}
}

void CNeuroNet::SetExpOutput(double* pExpOutputVector)
{
	for (int i = 0; i < m_pLayNodNum[m_iLayNum-1]; i++)
	{
		m_pExpOutput[i] = pExpOutputVector[i];
	}
}

void CNeuroNet::ForwardCalc()
{
	int l, i;
	//forward computing
	for (l = 1; l < m_iLayNum; l++)
	{
		for (i =0; i < m_pLayNodNum[l]; i++)
		{
			m_Layers[l][i].CalcInputSum();
			m_Layers[l][i].CalcOutput();
		}
	}
}

BOOL CNeuroNet::BackwardCalc()
{
	int l, i;

	//backward computing (note: each layer's weights are updated before the
	//layer below reads them to accumulate its own error)
	for (l = m_iLayNum-1; l > 0; l--)
	{
		for (i =0; i < m_pLayNodNum[l]; i++)
		{
			if (!m_Layers[l][i].AdjustWeights())
				return FALSE;
		}
	}

	return TRUE;
}

BOOL CNeuroNet::TrainOneTime()
{
	ForwardCalc();
	return BackwardCalc();
}

BOOL CNeuroNet::Train(int iTrainTimes)
{
	for (int i = 0; i < iTrainTimes; i++)
	{
		if (!TrainOneTime())
			return FALSE;
	}

	return TRUE;
}

void CNeuroNet::GetOutput(double* pOutputVector)
{
	for (int i = 0; i < m_pLayNodNum[m_iLayNum-1]; i++)
	{
		pOutputVector[i] = m_Layers[m_iLayNum-1][i].m_dOutput;
	}
}
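
// Usage sketch for CNeuroNet (a hypothetical demo, compiled only when
// NEURONET_DEMO is defined; it assumes the members used below are publicly
// accessible as declared in NeuroNetwork.h): approximate y = sin(x) on
// [0, 3] with a 1-8-1 network, sigmoid hidden layer and linear output.
// Train() repeats passes on the sample most recently set via SetInput()/
// SetExpOutput(), so the loop over samples goes on the outside.
#ifdef NEURONET_DEMO
static void DemoFunctionApprox()
{
	int aLayNodNum[3] = {1, 8, 1};
	CNeuroNet net(aLayNodNum, 3, NEURONET_FUNCTYPE_SIGMOID,
	              NEURONET_FUNCTYPE_LINEAR, 0.05);

	for (int epoch = 0; epoch < 2000; epoch++)
	{
		for (double x = 0; x <= 3.0; x += 0.1)
		{
			double in = x, out = sin(x);
			net.SetInput(&in);
			net.SetExpOutput(&out);
			if (!net.Train(1))			// FALSE means training diverged
				return;
		}
	}

	double in = 1.5, out = 0;
	net.SetInput(&in);
	net.ForwardCalc();
	net.GetOutput(&out);				// out should now be close to sin(1.5)
}
#endif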


/////////////////////////////////////////////////////////////////////////////
// CNeuronIT
BOOL CNeuronIT::AdjustWeights()
{
	double dDelta = 0;

	for (int i = 0; i < m_iWeightNum; i++)
	{
		dDelta = m_pNet->m_dDelta * (m_pNet->m_Layers[m_iLayerNo-1][i].m_dOutput - m_pWeights[i]);
		m_pWeights[i] += dDelta;
	}

	//NormalizeWeight();		//////////////////////////////////

	return TRUE;
}

//calculate the squared distance between the neuron's weights and its input vector.
double CNeuronIT::CalcDistance()
{
	double dDist = 0;

	for (int i = 0; i < m_iWeightNum; i++)
	{
		dDist += pow((m_pNet->m_Layers[m_iLayerNo-1][i].m_dOutput - m_pWeights[i]),2);
	}

	return dDist;
}

BOOL CNeuronIT::NormalizeWeight()
{
	double dDist = 0;
	int i;

	for (i = 0; i < m_iWeightNum; i++)
	{
		dDist += pow(m_pWeights[i], 2);
	}

	dDist = sqrt(dDist);
	if (dDist == 0)		// a zero weight vector cannot be normalized
		return FALSE;

	for (i = 0; i < m_iWeightNum; i++)
	{
		m_pWeights[i] = m_pWeights[i]/dDist;
	}

	return TRUE;
}

/////////////////////////////////////////////////////////////////////////////
// CNeuroNetIT
CNeuroNetIT::CNeuroNetIT(int iInLayNodNum, int iOutLayNodNum, double dDelta)
{
	int l, i;
	srand( (unsigned)time( NULL ) );

	int iLayNum = 2;
//	iMidLayFun = NEURONET_FUNCTYPE_LINEAR;
	int iOutLayFun = NEURONET_FUNCTYPE_LINEAR;

	m_pLayNodNum = new int [iLayNum];
	//memcpy(m_pLayNodNum, pLayNodNum, sizeof(int)*iLayNum);
	m_pLayNodNum[0] = iInLayNodNum;
	m_pLayNodNum[1] = iOutLayNodNum;
	m_iLayNum = iLayNum;

//	m_iMidLayFun = iMidLayFun;
	m_iOutLayFun = iOutLayFun;
	m_dDelta = dDelta;

	m_pInput = new double [iInLayNodNum];
	m_pExpOutput = new double [iOutLayNodNum];

	m_Layers = (CNeuron**)(new CNeuronIT* [iLayNum]);	// rows hold CNeuronIT arrays behind base-class pointers
//	m_Layers[0] = NULL;
	for (l = 0; l < iLayNum; l++)
	{
		int iNodNum = m_pLayNodNum[l];
		m_Layers[l] = new CNeuronIT [iNodNum];

		for (i = 0; i < iNodNum; i++)
		{
			if (l == 0)
				m_Layers[l][i].Initialize(this, l, i, NULL, 0);
			else
			{
				int iWgtNum = m_pLayNodNum[l-1];
				double* pInitWeights = new double [iWgtNum];
				GenerateInitWeights(pInitWeights, iWgtNum);
				m_Layers[l][i].Initialize(this, l, i, pInitWeights, 0);
				//((CNeuronIT*)&m_Layers[l][i])->NormalizeWeight();	////////////////////
				delete [] pInitWeights;
			}
		}
	}
	
}

CNeuroNetIT::~CNeuroNetIT()
{
	// nothing to do here: the base-class destructor releases m_Layers;
	// this relies on CNeuronIT adding no data members of its own
}

BOOL CNeuroNetIT::TrainOneTime()
{
	ForwardCalc();
	int i = GetWinNeuron();
	if (!((CNeuronIT*)&m_Layers[m_iLayNum-1][i])->AdjustWeights())
		return FALSE;

	return TRUE;
}

BOOL CNeuroNetIT::Train(int iTrainTimes)
{
	for (int i = 0; i < iTrainTimes; i++)
	{
		if (!TrainOneTime())
			return FALSE;
	}

	return TRUE;
}

int CNeuroNetIT::GetWinNeuron()
{
	int i;
	int iMin = 0;

	//compute each output neuron's distance to the current input
	double* pDist = new double[m_pLayNodNum[m_iLayNum-1]];
	for (i =0; i < m_pLayNodNum[m_iLayNum-1]; i++)
	{
		pDist[i] = ((CNeuronIT*)&m_Layers[m_iLayNum-1][i])->CalcDistance();
	}

	double dDistMin = DBL_MAX;	// larger than any possible squared distance
	for (i =0; i < m_pLayNodNum[m_iLayNum-1]; i++)
	{
		if (pDist[i] < dDistMin)
		{
			dDistMin = pDist[i];
			iMin = i;
		}
	}

	delete [] pDist;
	
	return iMin;
}

BOOL CNeuroNetIT::NormalizeVector(double* pVector, int iSize)
{
	double dDist = 0;
	int i;

	for (i = 0; i < iSize; i++)
	{
		dDist += pow(pVector[i], 2);
	}

	dDist = sqrt(dDist);
	if (dDist == 0)		// a zero vector cannot be normalized
		return FALSE;

	for (i = 0; i < iSize; i++)
	{
		pVector[i] = pVector[i]/dDist;
	}

	return TRUE;
}
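
// Usage sketch for CNeuroNetIT (a hypothetical demo, compiled only when
// NEURONET_DEMO is defined; it assumes the members used below are publicly
// accessible as declared in NeuroNetwork.h): cluster 2-D points into two
// groups with the competitive, winner-take-all layer. Each training step
// moves only the winning neuron's weight vector toward the current input.
#ifdef NEURONET_DEMO
static void DemoCompetitive()
{
	double aSamples[4][2] = { {0.1, 0.1}, {0.2, 0.0}, {0.9, 1.0}, {1.0, 0.8} };
	CNeuroNetIT net(2, 2, 0.1);				// 2 inputs, 2 output neurons

	for (int epoch = 0; epoch < 100; epoch++)
	{
		for (int s = 0; s < 4; s++)
		{
			net.SetInput(aSamples[s]);
			net.Train(1);
		}
	}

	// Classify a point: the winning neuron's index is its cluster label.
	net.SetInput(aSamples[0]);
	net.ForwardCalc();
	int iCluster = net.GetWinNeuron();
	(void)iCluster;							// silence the unused-variable warning
}
#endif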
