📄 neuron.cpp
#include "StdAfx.h"
#include <stdlib.h>
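/* neuron.cpp -- a multi-layer feed-forward neural network with sigmoid
   activation, trained by error backpropagation with momentum. Layers are
   linked left (input) to right (output); DVec and DMatrix are vector and
   matrix helpers declared in the project headers (not shown here). */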
NeuralLayer::NeuralLayer(int iNeuronNumber, double dbLearnRate,
                         double dbActiveLvl, double dbMomentum)
{
    m_iNeuronNum = iNeuronNumber;
    m_dbLearnRate = dbLearnRate;
    m_dbActiveLvl = dbActiveLvl;
    m_dbMomentum = dbMomentum;
    m_iActiveStep = 0;
    m_vecActives.resize(iNeuronNumber);
    m_pLeftLayer = NULL;
    m_pRightLayer = NULL;
    m_pDeltaWeights = NULL;
    m_pWeightsMatrix = NULL;
}
NeuralLayer::~NeuralLayer()
{
    if (m_pWeightsMatrix)
        delete m_pWeightsMatrix;
    if (m_pDeltaWeights)
        delete m_pDeltaWeights;
}
int NeuralLayer::getNeuronNum()
{
    return m_iNeuronNum;
}
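/* Attaching a left layer allocates the (n_left + 1) x n matrix of incoming
   weights for this layer: column j collects everything feeding neuron j,
   and the extra first row holds that neuron's bias term. */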
void NeuralLayer::setLeftLayer(NeuralLayer *pLeftLayer)
{
    m_pLeftLayer = pLeftLayer;
    if (m_pLeftLayer)
    {
        if (m_pWeightsMatrix)
            delete m_pWeightsMatrix;
        if (m_pDeltaWeights)
            delete m_pDeltaWeights;
        m_pWeightsMatrix = new DMatrix(m_pLeftLayer->getNeuronNum() + 1,
                                       m_iNeuronNum);
        m_pDeltaWeights = new DMatrix(m_pLeftLayer->getNeuronNum() + 1,
                                      m_iNeuronNum, 0.0);
        /* seed only once: reseeding for every layer added within the same
           second would replay the same random sequence per layer */
        static bool s_bSeeded = false;
        if (!s_bSeeded)
        {
            srand((unsigned)time(NULL));
            s_bSeeded = true;
        }
        /* initial weights drawn uniformly from {-1.0, -0.9, ..., 0.9} */
        for (int i = 0; i < m_pLeftLayer->getNeuronNum() + 1; i++)
            for (int j = 0; j < m_iNeuronNum; j++)
                (*m_pWeightsMatrix)(i, j) = (rand() % 20) / 10.0 - 1;
    }
}
void NeuralLayer::setRightLayer(NeuralLayer *pRightLayer)
{
    m_pRightLayer = pRightLayer;
}
int NeuralLayer::getActiveStep()
{
    return m_iActiveStep;
}
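/* Forward pass. Every layer bumps a step counter when it activates; a layer
   whose counter equals its left neighbour's knows the neighbour has not yet
   run for the current pass and forwards the call leftward, so active() may
   be invoked on any layer of the chain. The one-argument overload simply
   returns the activations cached by the most recent pass. */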
void NeuralLayer::active(DVec& vecOutputs)
{
    /* the activations were already computed; return the cached copy */
    vecOutputs.clear();
    vecOutputs.insert(vecOutputs.begin(), m_vecActives.begin(), m_vecActives.end());
}
void NeuralLayer::active(DVec &vecInputs, DVec& vecOutputs)
{
    /* leftmost (input) layer: just latch the inputs */
    if (m_pLeftLayer == NULL)
    {
        m_iActiveStep++;
        m_vecActives.clear();
        m_vecActives.insert(m_vecActives.begin(), vecInputs.begin(), vecInputs.end());
        if (m_pRightLayer)
            m_pRightLayer->active(vecInputs, vecOutputs);
        else
        {
            vecOutputs.clear();
            vecOutputs.insert(vecOutputs.begin(), vecInputs.begin(), vecInputs.end());
        }
    }
    else /* not the leftmost layer */
    {
        /* the left layer has not activated for this pass yet; restart the
           call from the input end of the chain */
        if (m_iActiveStep == m_pLeftLayer->getActiveStep())
        {
            m_pLeftLayer->active(vecInputs, vecOutputs);
            return;
        }
        m_iActiveStep++;
        /* compute this layer's activations: bias plus weighted inputs,
           squashed by the sigmoid 1 / (1 + exp(-a * net)) */
        for (int j = 0; j < m_iNeuronNum; j++)
        {
            m_vecActives[j] = (*m_pWeightsMatrix)(0, j);
            for (int i = 0; i < m_pLeftLayer->getNeuronNum(); i++)
                m_vecActives[j] += vecInputs[i] * (*m_pWeightsMatrix)(i + 1, j);
            m_vecActives[j] = 1 / (1 + exp(-m_vecActives[j] * m_dbActiveLvl));
            /* alternative squashing function:
               m_vecActives[j] = P_A * tanh(P_B * m_vecActives[j]); */
        }
        /* activate the next layer */
        if (m_pRightLayer)
            m_pRightLayer->active(m_vecActives, vecOutputs);
        else
        {
            vecOutputs.clear();
            vecOutputs.insert(vecOutputs.begin(), m_vecActives.begin(), m_vecActives.end());
        }
    }
}
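/* Backward pass (standard backpropagation with momentum). On the output
   layer vecFeedBack arrives holding the target values and is turned into
   the error (target - output); on hidden layers it already holds the error
   propagated from the right. Each delta is error * a * y * (1 - y), using
   the sigmoid derivative; the error for the left layer is accumulated
   through the pre-update weights, and every weight then moves by
   momentum * previous_delta + learn_rate * delta * input. */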
void NeuralLayer::learn(DVec &vecInputs, DVec& vecFeedBack)
{
    if (m_pLeftLayer)
    {
        DVec vecOutputs;
        int i, j;
        active(vecOutputs);   /* fetch this layer's cached activations */
        /* compute this layer's deltas */
        for (j = 0; j < m_iNeuronNum; j++)
        {
            /* output layer: turn the target values into the error */
            if (m_pRightLayer == NULL)
            {
                vecFeedBack[j] = vecFeedBack[j] - vecOutputs[j];
            }
            /* multiply by the sigmoid derivative a * y * (1 - y) */
            vecFeedBack[j] *= m_dbActiveLvl * vecOutputs[j] * (1 - vecOutputs[j]);
            /* derivative of the alternative tanh squashing function:
               vecFeedBack[j] *= P_B * (P_A - vecOutputs[j]) * (P_A + vecOutputs[j]) / P_A; */
        }
        /* propagate the error to the left layer through the old weights */
        DVec vecNewFeedBack;
        vecNewFeedBack.resize(m_pLeftLayer->getNeuronNum(), 0.0);
        for (i = 0; i < m_pLeftLayer->getNeuronNum(); i++)
            for (j = 0; j < m_iNeuronNum; j++)
                vecNewFeedBack[i] += (*m_pWeightsMatrix)(i + 1, j) * vecFeedBack[j];
        /* adjust the biases (row 0 of the weight matrix) */
        for (j = 0; j < m_iNeuronNum; j++)
        {
            (*m_pDeltaWeights)(0, j) = (*m_pDeltaWeights)(0, j) * m_dbMomentum
                + m_dbLearnRate * vecFeedBack[j];
            (*m_pWeightsMatrix)(0, j) += (*m_pDeltaWeights)(0, j);
        }
        /* adjust the weights; vecOutputs now holds the left layer's activations */
        m_pLeftLayer->active(vecOutputs);
        for (i = 0; i < m_pLeftLayer->getNeuronNum(); i++)
        {
            for (j = 0; j < m_iNeuronNum; j++)
            {
                (*m_pDeltaWeights)(i + 1, j) = (*m_pDeltaWeights)(i + 1, j) * m_dbMomentum
                    + m_dbLearnRate * vecFeedBack[j] * vecOutputs[i];
                (*m_pWeightsMatrix)(i + 1, j) += (*m_pDeltaWeights)(i + 1, j);
            }
        }
        /* recurse into the left layer */
        m_pLeftLayer->learn(vecInputs, vecNewFeedBack);
    }
}
double NeuralLayer::getWeight(int i, int j)
{
    /* skip the bias row */
    return (*m_pWeightsMatrix)(i + 1, j);
}
double NeuralLayer::getB(int j)
{
    return (*m_pWeightsMatrix)(0, j);
}
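/* NeuralNetwork keeps the layers as a doubly linked list running from
   m_pInputLayer to m_pOutputLayer. The input layer is created with the
   single-argument form of the NeuralLayer constructor, so the remaining
   parameters are presumably defaulted in the header (not shown). */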
NeuralNetwork::NeuralNetwork(int iInputSize)
{
    m_pInputLayer = new NeuralLayer(iInputSize);
    m_pOutputLayer = m_pInputLayer;
    m_iLayerNum = 1;
}
NeuralNetwork::~NeuralNetwork()
{
    NeuralLayer *pTmp;
    while (m_pOutputLayer)
    {
        pTmp = m_pOutputLayer->getLeftLayer();
        delete m_pOutputLayer;
        m_pOutputLayer = pTmp;
    }
    m_pInputLayer = NULL;
}
void NeuralNetwork::appendLayer(int iNeuronNumber, double dbLearnRate,
                                double dbActiveLvl, double dbMomentum)
{
    NeuralLayer *pNeuralLayer = new NeuralLayer(iNeuronNumber, dbLearnRate,
                                                dbActiveLvl, dbMomentum);
    pNeuralLayer->setLeftLayer(m_pOutputLayer);
    m_pOutputLayer->setRightLayer(pNeuralLayer);
    m_pOutputLayer = pNeuralLayer;
    m_iLayerNum++;
}
void NeuralNetwork::active(DVec &vecInputs, DVec& vecOutputs)
{
    m_pInputLayer->active(vecInputs, vecOutputs);
}
void NeuralNetwork::learn(DVec &vecInputs, DVec& vecFeedBack)
{
    /* run a forward pass first so every layer caches fresh activations;
       vecFeedBack holds the target outputs and is overwritten with deltas */
    DVec vecTmp;
    m_pInputLayer->active(vecInputs, vecTmp);
    m_pOutputLayer->learn(vecInputs, vecFeedBack);
}
/* return w(i, j), where j indexes a neuron in the layer selected by
   iLayerIndex (0 being the input layer) */
double NeuralNetwork::getWeight(int iLayerIndex, int i, int j)
{
    NeuralLayer *pNeuralLayer = m_pInputLayer;
    while (iLayerIndex && pNeuralLayer)
    {
        pNeuralLayer = pNeuralLayer->getRightLayer();
        iLayerIndex--;
    }
    if (pNeuralLayer)
        return pNeuralLayer->getWeight(i, j);
    else
        return 0.0;
}
double NeuralNetwork::getB(int iLayerIndex, int j)
{
    NeuralLayer *pNeuralLayer = m_pInputLayer;
    while (iLayerIndex && pNeuralLayer)
    {
        pNeuralLayer = pNeuralLayer->getRightLayer();
        iLayerIndex--;
    }
    if (pNeuralLayer)
        return pNeuralLayer->getB(j);
    else
        return 0.0;
}
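/* A minimal usage sketch, not part of the original file. It assumes DVec is
   a std::vector<double> typedef, that DMatrix and the class declarations
   come from the project header, and that the hyper-parameters shown
   (learn rate, activation level, momentum) are merely plausible values;
   a 2-3-1 network can be trained on XOR roughly like this: */
#include <vector>
int main()
{
    NeuralNetwork net(2);               /* 2 inputs */
    net.appendLayer(3, 0.5, 1.0, 0.9);  /* hidden layer: 3 neurons */
    net.appendLayer(1, 0.5, 1.0, 0.9);  /* output layer: 1 neuron */
    double samples[4][3] = { {0,0,0}, {0,1,1}, {1,0,1}, {1,1,0} };
    for (int epoch = 0; epoch < 10000; epoch++)
        for (int s = 0; s < 4; s++)
        {
            DVec in(samples[s], samples[s] + 2);
            DVec target(1, samples[s][2]);  /* learn() overwrites this */
            net.learn(in, target);
        }
    DVec in(2), out;
    in[0] = 1.0; in[1] = 0.0;
    net.active(in, out);
    /* out[0] should now be close to 1.0 */
    return 0;
}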