// NeuroNetwork.cpp -- implementation of the CNeuron / CNeuroNet class family
#include "stdafx.h"
#include <MATH.H>
#include "NeuroNetwork.h"
#include <FLOAT.H>
//////////////////////////////////////////////////////////////////////////////
// class CNeuron, CNeuroNet, CNeuronBP, CNeuroNetBP, CNeuronIT,
// CNeuroNetIT implementation
// Guo.X.W
// 2005.1
/////////////////////////////////////////////
/////////////////////////
//class CNeuron
//CNeuron::CNeuron(CNeuroNet* pNet, int iLayerNo, int iNo, double* pInitWeights, double dInitThreshold)
//{
// Initialize(pNet, iLayerNo, iNo, pInitWeights, dInitThreshold);
//}
// Default constructor: configures the neuron as an input-layer-style node
// (single weight slot, linear activation, no allocated weight array).
// Real wiring into a network happens later in Initialize().
CNeuron::CNeuron()
{
    m_pNet       = NULL;
    m_pWeights   = NULL;
    m_iLayerNo   = 0;
    m_iNo        = 0;
    m_iWeightNum = 1;
    m_iFunType   = NEURONET_FUNCTYPE_LINEAR;
    m_dThreshold = 0;
    m_dInputSum  = 0;
    m_dOutput    = 0;
    m_dError     = 0;
}
// Configure this neuron as node iNo of layer iLayerNo in network pNet.
//   pInitWeights   - optional array of starting weights (copied); when NULL
//                    every weight starts at 0.5.
//   dInitThreshold - starting threshold.
// Layer-0 neurons are pure pass-through inputs: no weights, linear output.
void CNeuron::Initialize(CNeuroNet* pNet, int iLayerNo, int iNo, double* pInitWeights, double dInitThreshold)
{
    m_iLayerNo = iLayerNo;
    m_iNo = iNo;
    // FIX: release any previously allocated weights so calling Initialize
    // twice does not leak (delete[] on NULL is a safe no-op).
    delete [] m_pWeights;
    m_pWeights = NULL;
    if (iLayerNo == 0)
    {
        // Input layer: one dummy weight slot, identity activation, no bias.
        m_iWeightNum = 1;
        m_iFunType = NEURONET_FUNCTYPE_LINEAR;
        m_dThreshold = 0;
    }
    else
    {
        // One incoming weight per neuron of the previous layer.
        m_iWeightNum = pNet->GetLayNodNum(iLayerNo-1);
        // The output layer may use a different activation than hidden layers.
        if (iLayerNo == pNet->GetLayNum()-1)
            m_iFunType = pNet->GetOutLayFun();
        else
            m_iFunType = pNet->GetMidLayFun();
        m_pWeights = new double [m_iWeightNum];
        if (pInitWeights != NULL)
        {
            // FIX: was m_iWeightNum*8 -- a hard-coded element size;
            // use sizeof(double) so the copy is portable.
            memcpy(m_pWeights, pInitWeights, m_iWeightNum*sizeof(double));
        }
        else
        {
            for (int i = 0; i < m_iWeightNum; i++)
                m_pWeights[i] = 0.5;
        }
        // FIX: the original compared the double dInitThreshold against NULL
        // and assigned either dInitThreshold or 0 -- both branches reduce to
        // a plain assignment.
        m_dThreshold = dInitThreshold;
    }
    m_dInputSum = 0;
    m_dOutput = 0;
    m_pNet = pNet;
}
// Release the weight array. delete[] on NULL is a safe no-op, which covers
// input-layer neurons that never allocate m_pWeights.
CNeuron::~CNeuron()
{
delete [] m_pWeights;
}
// Weighted sum of the previous layer's outputs, minus this neuron's
// threshold (the threshold acts as a bias on a constant -1 input).
void CNeuron::CalcInputSum()
{
    double dSum = 0;
    for (int w = 0; w < m_iWeightNum; w++)
    {
        dSum += m_pNet->NeuronAt(m_iLayerNo-1, w)->m_dOutput * m_pWeights[w];
    }
    m_dInputSum = dSum - m_dThreshold;
}
// Apply the activation function to m_dInputSum and store it in m_dOutput.
void CNeuron::CalcOutput()
{
    if (m_iFunType == NEURONET_FUNCTYPE_SIGMOID)
    {
        // Logistic sigmoid 1 / (1 + e^-x).
        m_dOutput = 1 / (1 + exp(-m_dInputSum));
    }
    else if (m_iFunType == NEURONET_FUNCTYPE_WAVELET)
    {
        // Wavelet activation is not implemented; m_dOutput is deliberately
        // left unchanged, matching the original switch's empty case.
    }
    else
    {
        // LINEAR and any unrecognized type: identity.
        m_dOutput = m_dInputSum;
    }
}
// Randomize every weight and the threshold uniformly in [0, 1].
// (Caller is expected to have seeded rand(); CNeuroNet::Initialize does.)
void CNeuron::GenerateWeights()
{
    for (int w = 0; w < m_iWeightNum; w++)
        m_pWeights[w] = rand() / double(RAND_MAX);
    m_dThreshold = rand() / double(RAND_MAX);
}
// Shift every weight and the threshold by the same constant dOffset
// (used by CNeuroNetBP to center fresh [0,1] weights around zero).
void CNeuron::OffsetWeights(double dOffset)
{
    m_dThreshold += dOffset;
    for (int w = 0; w < m_iWeightNum; w++)
        m_pWeights[w] += dOffset;
}
/////////////////////////////////////////////////////////////////////////////
// CNeuronBP
// CNeuronBP adds back-propagation learning to the base neuron; all state
// lives in CNeuron, so construction/destruction needs no extra work.
CNeuronBP::CNeuronBP()
{
}
CNeuronBP::~CNeuronBP()
{
}
// One back-propagation step for this neuron.
// For an output-layer neuron the error is (expected - actual); for a hidden
// neuron it is the error of the next layer weighted by that layer's incoming
// weights. Weights then move along the gradient:
//   w_i      += eta * error * f'(net) * input_i
//   threshold += eta * error * f'(net) * (-1)
// Returns FALSE (after a message box) when the error turns non-finite,
// aborting training.
// NOTE(review): CNeuroNetBP::BackwardCalc visits layers back-to-front, so a
// hidden neuron reads the next layer's *already adjusted* weights here -- a
// known variant of plain BP; confirm this is intended.
BOOL CNeuronBP::AdjustWeights()
{
double dDeriv = 0;
double dDelta = 0;
// Derivative of the activation function at the current input sum.
switch (m_iFunType)
{
case NEURONET_FUNCTYPE_LINEAR:
dDeriv = 1;
break;
case NEURONET_FUNCTYPE_SIGMOID:
// d/dx sigmoid(x) = e^-x / (1 + e^-x)^2
dDeriv = exp(-m_dInputSum)/pow((1+exp(-m_dInputSum)),2);
break;
case NEURONET_FUNCTYPE_WAVELET:
// Not implemented: dDeriv stays 0, so no weight update happens.
break;
default:
dDeriv = 1;
break;
}
if (m_iLayerNo == m_pNet->GetLayNum()-1)
{
// Output layer: error = target - actual.
m_dError = ((CNeuroNetBP*)m_pNet)->GetExpOutput()[m_iNo] - m_dOutput;
if (!_finite(m_dError))
{
// Message (Chinese): "Unsuitable initial values prevent training from continuing!"
MessageBox(NULL,"不合适的初值导致网络训练不能继续!","",MB_OK|MB_ICONERROR|MB_TOPMOST);
return FALSE;
}
}
else
{
// Hidden layer: propagate errors back from the next layer.
m_dError = 0;
int iForwardLayNodNum = m_pNet->GetLayNodNum(m_iLayerNo+1);
for (int i = 0; i < iForwardLayNodNum; i++)
{
m_dError += m_pNet->NeuronAt(m_iLayerNo+1,i)->m_dError * m_pNet->NeuronAt(m_iLayerNo+1,i)->m_pWeights[m_iNo];
if (!_finite(m_dError))
{
MessageBox(NULL,"不合适的初值导致网络训练不能继续!","",MB_OK|MB_ICONERROR|MB_TOPMOST);
return FALSE;
}
}
}
// Gradient step on every incoming weight.
for (int i = 0; i < m_iWeightNum; i++)
{
dDelta = m_pNet->GetLearnFactor() * m_dError * dDeriv * m_pNet->NeuronAt(m_iLayerNo-1,i)->m_dOutput;
m_pWeights[i] += dDelta;
}
// The threshold behaves like a weight whose input is the constant -1.
dDelta = m_pNet->GetLearnFactor() * m_dError * dDeriv * (-1);
m_dThreshold += dDelta;
return TRUE;
}
/////////////////////////////////////////////////////////////////////////////
// CNeuroIT
// Competitive-learning (instar) update: move each weight toward the current
// input component by the learning factor. Called only for the winning neuron
// (see CNeuroNetIT::BackwardCalc). Always returns TRUE.
BOOL CNeuronIT::AdjustWeights()
{
    // FIX: removed the unused local dDeriv.
    for (int i = 0; i < m_iWeightNum; i++)
    {
        double dDelta = m_pNet->GetLearnFactor() *
            (m_pNet->NeuronAt(m_iLayerNo-1,i)->m_dOutput - m_pWeights[i]);
        m_pWeights[i] += dDelta;
    }
    //NormalizeWeight(); // kept disabled, as in the original
    return TRUE;
}
// Squared Euclidean distance between this neuron's weight vector and the
// previous layer's outputs (i.e. the current input vector).
// FIX: pow(x,2) replaced with x*x -- exact and far cheaper than the general
// transcendental pow for an integer-squared term.
double CNeuronIT::CalcDistance()
{
    double dDist = 0;
    for (int i = 0; i < m_iWeightNum; i++)
    {
        double dDiff = m_pNet->NeuronAt(m_iLayerNo-1,i)->m_dOutput - m_pWeights[i];
        dDist += dDiff * dDiff;
    }
    return dDist;
}
// Scale the weight vector to unit Euclidean length.
// FIX: returns FALSE (weights untouched) when the norm is zero -- the
// original divided by zero and filled the weights with NaN/inf.
BOOL CNeuronIT::NormalizeWeight()
{
    double dNorm = 0;
    int i;
    for (i = 0; i < m_iWeightNum; i++)
    {
        dNorm += m_pWeights[i] * m_pWeights[i];   // x*x instead of pow(x,2)
    }
    dNorm = sqrt(dNorm);
    if (dNorm == 0)
        return FALSE;
    for (i = 0; i < m_iWeightNum; i++)
    {
        m_pWeights[i] = m_pWeights[i] / dNorm;
    }
    return TRUE;
}
/////////////////////////////////////////////////////////////////////////////
// CNeuroSOM
// Neighborhood-weighted Kohonen update: move each weight toward the current
// input, scaled by the learning factor and the neighborhood factor dFactor
// (1 for the winning neuron, decaying toward 0 at the radius edge -- see
// CNeuroNetSOM::BackwardCalc). Always returns TRUE.
BOOL CNeuronSOM::AdjustWeights(double dFactor)
{
    // FIX: removed the unused local dDeriv.
    for (int i = 0; i < m_iWeightNum; i++)
    {
        double dDelta = m_pNet->GetLearnFactor() * dFactor *
            (m_pNet->NeuronAt(m_iLayerNo-1,i)->m_dOutput - m_pWeights[i]);
        m_pWeights[i] += dDelta;
    }
    //NormalizeWeight(); // kept disabled, as in the original
    return TRUE;
}
/////////////////////////////////////////////////////////////////////////////
//class CNeuronNet
///////////////////////////////////////////////////////
// Default-construct an empty network; real setup happens in Initialize().
// Intended for subclasses, which allocate the neuron arrays first.
CNeuroNet::CNeuroNet()
    : m_pLayNodNum(NULL)
    , m_iLayNum(0)
    , m_iMidLayFun(0)
    , m_iOutLayFun(0)
    , m_dDelta(0)
    , m_pInput(NULL)
    , m_pLayers(NULL)
{
}
// Wire an already-allocated neuron array into the network: copy the layer
// sizes, allocate the input buffer, initialize every neuron with defaults,
// then randomize all weights. pLayers stays owned by the caller/subclass.
// NOTE(review): the definition carries no return type (pre-standard implicit
// int accepted by old MSVC); it is left unchanged so it still matches the
// declaration in NeuroNetwork.h -- confirm and add the proper type there.
CNeuroNet::Initialize(CNeuron*** pLayers, int* pLayNodNum, int iLayNum, int iMidLayFun, int iOutLayFun, double dDelta)
{
    srand( (unsigned)time( NULL ) );
    m_pLayers = pLayers;              // FIX: was assigned twice in the original
    m_pLayNodNum = new int [iLayNum];
    memcpy(m_pLayNodNum, pLayNodNum, sizeof(int)*iLayNum);
    m_iLayNum = iLayNum;
    m_iMidLayFun = iMidLayFun;
    m_iOutLayFun = iOutLayFun;
    m_dDelta = dDelta;
    m_pInput = new double [pLayNodNum[0]];
    // Give every neuron its layer/index wiring and default weights.
    for (int l = 0; l < iLayNum; l++)
    {
        for (int i = 0; i < m_pLayNodNum[l]; i++)
        {
            (*m_pLayers)[l][i].Initialize(this, l, i, NULL, 0);
        }
    }
    // Replace the defaults with random weights in [0, 1].
    GenerateInitWeights();
}
// Free the layer-size array and input buffer. The neuron arrays themselves
// are owned and released by the subclass (CNeuroNetBP/IT/SOM); the borrowed
// m_pLayers pointer is just cleared.
CNeuroNet::~CNeuroNet()
{
m_pLayers = NULL;
delete [] m_pLayNodNum;
delete [] m_pInput;
}
// Re-randomize the weights of every neuron in layers 1..N-1
// (layer 0 is the input layer and carries no weights).
void CNeuroNet::GenerateInitWeights()
{
    for (int iLay = 1; iLay < m_iLayNum; iLay++)
        for (int iNod = 0; iNod < m_pLayNodNum[iLay]; iNod++)
            (*m_pLayers)[iLay][iNod].GenerateWeights();
}
// Shift every weight and threshold in layers 1..N-1 by dOffset.
void CNeuroNet::OffsetWeights(double dOffset)
{
    for (int iLay = 1; iLay < m_iLayNum; iLay++)
        for (int iNod = 0; iNod < m_pLayNodNum[iLay]; iNod++)
            (*m_pLayers)[iLay][iNod].OffsetWeights(dOffset);
}
// Copy the caller's input vector into the net and present each component
// directly as the output of the corresponding input-layer neuron.
void CNeuroNet::SetInput(double* pInputVector)
{
    const int iInNum = m_pLayNodNum[0];
    for (int i = 0; i < iInNum; i++)
    {
        m_pInput[i] = pInputVector[i];
        (*m_pLayers)[0][i].m_dOutput = pInputVector[i];
    }
}
void CNeuroNet::ForwardCalc()
{
int l, i;
//forward computing
for (l = 1; l < m_iLayNum; l++)
{
for (i =0; i < m_pLayNodNum[l]; i++)
{
(*m_pLayers)[l][i].CalcInputSum();
(*m_pLayers)[l][i].CalcOutput();
}
}
}
// One training iteration: a forward pass followed by the (subclass-specific)
// backward/weight-adjustment pass. Returns the backward pass's status.
BOOL CNeuroNet::TrainOneTime()
{
    ForwardCalc();
    BOOL bOk = BackwardCalc();
    return bOk;
}
// Run iTrainTimes training iterations; stop and report FALSE as soon as
// one iteration fails.
BOOL CNeuroNet::Train(int iTrainTimes)
{
    int iLeft = iTrainTimes;
    while (iLeft-- > 0)
    {
        if (!TrainOneTime())
            return FALSE;
    }
    return TRUE;
}
// Number of neurons in layer iLayNo (no bounds checking).
int CNeuroNet::GetLayNodNum(int iLayNo)
{
return m_pLayNodNum[iLayNo];
}
// Total number of layers, including the input layer.
int CNeuroNet::GetLayNum()
{
return m_iLayNum;
}
// Activation-function id used by the output layer.
int CNeuroNet::GetOutLayFun()
{
return m_iOutLayFun;
}
// Activation-function id used by the hidden (middle) layers.
int CNeuroNet::GetMidLayFun()
{
return m_iMidLayFun;
}
// Pointer to the neuron at (layer, index); no bounds checking.
CNeuron* CNeuroNet::NeuronAt(int iLayNo, int iNo)
{
return &((*m_pLayers)[iLayNo][iNo]);
}
// Learning rate used by the weight-update rules.
double CNeuroNet::GetLearnFactor()
{
return m_dDelta;
}
// Change the learning rate.
void CNeuroNet::SetLearnFactor(double dLearnFactor)
{
m_dDelta = dLearnFactor;
}
/////////////////////////////////////////////////////////////////////////////
// CNeuroNetBP
// Default-construct an empty BP network; the layer arrays and expected-output
// buffer stay NULL until the parameterized constructor allocates them.
CNeuroNetBP::CNeuroNetBP()
{
m_Layers = NULL;
m_pExpOutput = NULL;
}
// Build a BP network: one CNeuronBP array per layer plus the expected-output
// buffer, then run the shared base-class initialization. Fresh weights start
// in [0, 1]; the final OffsetWeights(-0.5) centers them in [-0.5, 0.5].
CNeuroNetBP::CNeuroNetBP(int* pLayNodNum, int iLayNum, int iMidLayFun, int iOutLayFun, double dDelta)
{
    m_Layers = new CNeuronBP* [iLayNum];
    for (int iLay = 0; iLay < iLayNum; iLay++)
        m_Layers[iLay] = new CNeuronBP [pLayNodNum[iLay]];
    m_pExpOutput = new double [pLayNodNum[iLayNum-1]];
    Initialize((CNeuron***)&m_Layers, pLayNodNum, iLayNum, iMidLayFun, iOutLayFun, dDelta);
    OffsetWeights(-0.5);
}
// Release the per-layer neuron arrays, the layer pointer table, and the
// expected-output buffer (all NULL-safe for a default-constructed object).
CNeuroNetBP::~CNeuroNetBP()
{
    if (m_Layers != NULL)
    {
        const int iLayNum = GetLayNum();
        for (int iLay = 0; iLay < iLayNum; iLay++)
            delete [] m_Layers[iLay];
        delete [] m_Layers;
    }
    if (m_pExpOutput != NULL)
        delete [] m_pExpOutput;
}
// Back-propagation pass: adjust weights layer by layer from the output
// layer down to layer 1 (the input layer has no weights). Stops and
// returns FALSE if any neuron reports a failed update.
BOOL CNeuroNetBP::BackwardCalc()
{
    for (int iLay = GetLayNum()-1; iLay > 0; iLay--)
    {
        const int iNodNum = GetLayNodNum(iLay);
        for (int iNod = 0; iNod < iNodNum; iNod++)
        {
            if (!NeuronAt(iLay, iNod)->AdjustWeights())
                return FALSE;
        }
    }
    return TRUE;
}
// Store the expected (target) output vector used by the next training pass.
void CNeuroNetBP::SetExpOutput(double* pExpOutputVector)
{
    const int iOutNum = GetLayNodNum(GetLayNum()-1);
    for (int i = 0; i < iOutNum; i++)
        m_pExpOutput[i] = pExpOutputVector[i];
}
void CNeuroNetBP::GetOutput(double* pOutputVector)
{
int iLayNum = GetLayNum();
for (int i = 0; i < GetLayNodNum(iLayNum-1); i++)
{
pOutputVector[i] = NeuronAt(iLayNum-1,i)->m_dOutput;
}
}
// Expose the expected-output buffer (read by CNeuronBP::AdjustWeights to
// compute the output-layer error).
double* CNeuroNetBP::GetExpOutput()
{
return m_pExpOutput;
}
/////////////////////////////////////////////////////////////////////////////
// CNeuroNetIT
// Default-construct an empty competitive network; layers stay NULL until
// the parameterized constructor allocates them.
CNeuroNetIT::CNeuroNetIT()
{
m_Layers = NULL;
}
// Build a two-layer competitive (winner-take-all) network: an input layer of
// iInLayNodNum pass-through neurons and an output layer of iOutLayNodNum
// CNeuronIT competitors with linear activation.
// Initialize() copies the layer sizes, so a stack array suffices here.
CNeuroNetIT::CNeuroNetIT(int iInLayNodNum, int iOutLayNodNum, double dDelta)
{
    int aLayNodNum[2];
    aLayNodNum[0] = iInLayNodNum;
    aLayNodNum[1] = iOutLayNodNum;
    m_Layers = new CNeuronIT* [2];
    for (int iLay = 0; iLay < 2; iLay++)
        m_Layers[iLay] = new CNeuronIT [aLayNodNum[iLay]];
    Initialize((CNeuron***)&m_Layers, aLayNodNum, 2, 0, NEURONET_FUNCTYPE_LINEAR, dDelta);
}
// Release both neuron arrays and the layer pointer table
// (NULL-safe for a default-constructed object).
CNeuroNetIT::~CNeuroNetIT()
{
    if (m_Layers == NULL)
        return;
    delete [] m_Layers[0];
    delete [] m_Layers[1];
    delete [] m_Layers;
}
// Training step: only the winning (closest) output neuron learns.
BOOL CNeuroNetIT::BackwardCalc()
{
    const int iWin = GetWinNeuron();
    return NeuronAt(1, iWin)->AdjustWeights();
}
// The network's "output" is simply the index of the winning output neuron.
int CNeuroNetIT::GetOutput()
{
return GetWinNeuron();
}
// Return the index of the output neuron whose weight vector is closest
// (squared Euclidean distance) to the current input vector.
// FIX: the minimum was seeded with the magic constant 2, which silently
// returned neuron 0 whenever every distance was >= 2 (i.e. whenever the
// inputs/weights were not unit-normalized). Seed with DBL_MAX instead.
// Also folds the two passes (fill array, then scan) into one pass with no
// temporary heap allocation.
int CNeuroNetIT::GetWinNeuron()
{
    int iMin = 0;
    double dDistMin = DBL_MAX;
    const int iOutNum = GetLayNodNum(1);
    for (int i = 0; i < iOutNum; i++)
    {
        double dDist = ((CNeuronIT*)NeuronAt(1,i))->CalcDistance();
        if (dDist < dDistMin)
        {
            dDistMin = dDist;
            iMin = i;
        }
    }
    return iMin;
}
// Scale pVector (length iSize) in place to unit Euclidean length.
// FIX: returns FALSE (vector untouched) when the norm is zero -- the
// original divided by zero, producing NaN/inf components.
BOOL CNeuroNetIT::NormalizeVector(double* pVector, int iSize)
{
    double dNorm = 0;
    int i;
    for (i = 0; i < iSize; i++)
    {
        dNorm += pVector[i] * pVector[i];   // x*x instead of pow(x,2)
    }
    dNorm = sqrt(dNorm);
    if (dNorm == 0)
        return FALSE;
    for (i = 0; i < iSize; i++)
    {
        pVector[i] = pVector[i] / dNorm;
    }
    return TRUE;
}
/////////////////////////////////////////////////////////////////////////////
// CNeuroNetSOM
// Default-construct an empty SOM; layers, plane width and neighborhood
// radius stay zero/NULL until the parameterized constructor sets them.
CNeuroNetSOM::CNeuroNetSOM()
{
m_Layers = NULL;
m_iOutPlaneWidth = 0;
m_dRadius = 0;
}
// Build a self-organizing map: an input layer of iInLayNodNum neurons and a
// square output plane of iOutPlaneWidth x iOutPlaneWidth CNeuronSOM nodes.
// dRadius is the neighborhood radius used during training.
CNeuroNetSOM::CNeuroNetSOM(int iInLayNodNum, int iOutPlaneWidth, double dDelta, double dRadius)
{
    m_iOutPlaneWidth = iOutPlaneWidth;
    m_dRadius = dRadius;
    // Initialize() copies the layer sizes, so a stack array suffices.
    int aLayNodNum[2];
    aLayNodNum[0] = iInLayNodNum;
    // FIX: was (int)pow(iOutPlaneWidth,2) -- pow computes in floating point
    // and the truncating cast can round an exact square down on some
    // implementations; exact integer multiply instead.
    aLayNodNum[1] = iOutPlaneWidth * iOutPlaneWidth;
    m_Layers = new CNeuronSOM* [2];
    for (int l = 0; l < 2; l++)
    {
        m_Layers[l] = new CNeuronSOM [aLayNodNum[l]];
    }
    Initialize((CNeuron***)&m_Layers, aLayNodNum, 2, 0, NEURONET_FUNCTYPE_LINEAR, dDelta);
}
// Release both neuron arrays and the layer pointer table
// (NULL-safe for a default-constructed object).
CNeuroNetSOM::~CNeuroNetSOM()
{
    if (m_Layers == NULL)
        return;
    delete [] m_Layers[0];
    delete [] m_Layers[1];
    delete [] m_Layers;
}
// SOM training step: locate the winning neuron on the output plane, then
// update every neuron within m_dRadius (Euclidean distance on the grid).
// The update strength decays linearly from 1 at the winner to 0 at the
// radius edge.
BOOL CNeuroNetSOM::BackwardCalc()
{
    int iWin = GetWinNeuron();
    int iRow0 = iWin / m_iOutPlaneWidth;
    int iCol0 = iWin % m_iOutPlaneWidth;
    for (int i = 0; i < GetLayNodNum(1); i++)
    {
        int iDRow = i / m_iOutPlaneWidth - iRow0;
        int iDCol = i % m_iOutPlaneWidth - iCol0;
        // FIX: square the integer deltas directly instead of pow(int,2).
        double dDist = sqrt((double)(iDRow*iDRow + iDCol*iDCol));
        if (dDist <= m_dRadius)
        {
            // FIX: guard m_dRadius == 0 -- the original computed 0/0 = NaN
            // for the winner; with a zero radius only the winner learns,
            // at full strength.
            double dFactor = (m_dRadius > 0) ? (1 - dDist/m_dRadius) : 1;
            if (!((CNeuronSOM*)NeuronAt(1,i))->AdjustWeights(dFactor))
                return FALSE;
        }
    }
    return TRUE;
}
// End of NeuroNetwork.cpp