// neuralnet.cpp
// NOTE(review): the original paste carried web code-viewer chrome here
// ("📄 neuralnet.cpp" / "字号:"), commented out so the file compiles.
#include "stdafx.h"
#include "NeuralNet.h"
// Construct a neuron that takes iNumInputs real inputs plus one extra
// bias input (hence m_iNumInputs = iNumInputs + 1).
// Every weight — bias weight included — starts as a random value in
// [-1, 1] supplied by RandomClamped().
SNeuron::SNeuron(int iNumInputs) : m_iNumInputs(iNumInputs + 1)
{
    // One weight per real input, plus one more for the bias term.
    for (int w = 0; w < iNumInputs + 1; ++w)
    {
        m_vecWeight.push_back(RandomClamped());
    }
}
// A layer is a collection of iNumNeurons neurons, each of which
// receives iNumInputsPerNeuron inputs. Neurons are constructed one by
// one so each gets its own fresh random weights.
SNeuronLayer::SNeuronLayer(int iNumNeurons, int iNumInputsPerNeuron)
    : m_iNumNeurons(iNumNeurons)
{
    m_vecNeurons.reserve(iNumNeurons);
    for (int n = 0; n < iNumNeurons; ++n)
    {
        m_vecNeurons.push_back(SNeuron(iNumInputsPerNeuron));
    }
}
// Build the network with the topology hard-wired for this project:
// 14 inputs, one hidden layer of 6 neurons, 3 outputs.
CNeuralNet::CNeuralNet()
{
    m_iNumInputs            = 14;
    m_iNumOutputs           = 3;
    m_iNumHiddenLayers      = 1;
    m_iNeuronPerHiddenLayer = 6;

    // Allocate the layers according to the settings above.
    CreateNet();
}
// Allocate the layer objects that make up the network, wiring each
// layer's input count to the width of the layer that feeds it.
void CNeuralNet::CreateNet()
{
    // Degenerate case: no hidden layers — the inputs feed the output
    // layer directly.
    if (m_iNumHiddenLayers <= 0)
    {
        m_vecLayers.push_back(SNeuronLayer(m_iNumOutputs, m_iNumInputs));
        return;
    }

    // First hidden layer is fed by the network inputs.
    m_vecLayers.push_back(SNeuronLayer(m_iNeuronPerHiddenLayer, m_iNumInputs));

    // Any remaining hidden layers all share the same width.
    for (int layer = 1; layer < m_iNumHiddenLayers; ++layer)
    {
        m_vecLayers.push_back(SNeuronLayer(m_iNeuronPerHiddenLayer, m_iNeuronPerHiddenLayer));
    }

    // Output layer is fed by the last hidden layer.
    m_vecLayers.push_back(SNeuronLayer(m_iNumOutputs, m_iNeuronPerHiddenLayer));
}
// Feed-forward pass: propagate the given inputs through every hidden
// layer and the output layer, returning the network's outputs.
// Returns an empty vector if the input size does not match the network.
//
// The parameter stays a non-const reference to match the declaration in
// the header, but — unlike the original code, which overwrote the
// caller's vector with intermediate activations via `inputs = outputs`
// — the caller's data is now left untouched: we work on a local copy.
vector<double> CNeuralNet::Update(vector<double> &inputs)
{
    vector<double> outputs;

    // Malformed input vector: signal the error with an empty result.
    if (inputs.size() != static_cast<size_t>(m_iNumInputs))
    {
        return outputs;
    }

    // Local working copy so the caller's inputs are not clobbered.
    vector<double> layerInputs = inputs;

    // One pass per layer: all hidden layers plus the output layer.
    for (int i = 0; i < m_iNumHiddenLayers + 1; ++i)
    {
        if (i > 0)
        {
            // The previous layer's outputs feed this layer.
            layerInputs = outputs;
        }
        outputs.clear();

        // For each neuron: sum (input * weight) over all inputs, add the
        // bias contribution, then squash through the sigmoid.
        for (int j = 0; j < m_vecLayers[i].m_iNumNeurons; ++j)
        {
            SNeuron &neuron = m_vecLayers[i].m_vecNeurons[j];
            const int NumInputs = neuron.m_iNumInputs;
            double netinput = 0;

            // Weighted sum of the real inputs (the last weight slot is
            // reserved for the bias, handled below).
            for (int k = 0; k < NumInputs - 1; ++k)
            {
                netinput += neuron.m_vecWeight[k] * layerInputs[k];
            }

            // Bias term: the bias weight multiplies a constant input of -1.
            netinput += neuron.m_vecWeight[NumInputs - 1] * (-1);

            outputs.push_back(Sigmoid(netinput, 1));
        }
    }
    return outputs;
}
// Flatten every weight in the network — layer by layer, neuron by
// neuron, weight by weight (bias weights included) — into one vector.
// The ordering matches what PutWeights() expects back.
vector<double> CNeuralNet::GetWeights() const
{
    vector<double> weights;
    for (int layer = 0; layer < m_iNumHiddenLayers + 1; ++layer)
    {
        const SNeuronLayer &lyr = m_vecLayers[layer];
        for (int n = 0; n < lyr.m_iNumNeurons; ++n)
        {
            const SNeuron &neuron = lyr.m_vecNeurons[n];
            for (int w = 0; w < neuron.m_iNumInputs; ++w)
            {
                weights.push_back(neuron.m_vecWeight[w]);
            }
        }
    }
    return weights;
}
void CNeuralNet::PutWeights(std::vector<double> &weights)
{
int cWeight=0;
for(int i=0; i<m_iNumHiddenLayers+1; i++)
{
for(int j=0; j<m_vecLayers[i].m_iNumNeurons; j++)
{
for(int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_iNumInputs; k++)
{
m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k]=weights[cWeight++];
}
}
}
return;
}
// Total number of weights in the network, bias weights included.
// The original code incremented a counter once per weight inside a
// third nested loop; each neuron already stores its weight count in
// m_iNumInputs, so we add that directly.
int CNeuralNet::GetNumberOfWeights() const
{
    int weights = 0;
    for (int i = 0; i < m_iNumHiddenLayers + 1; ++i)
    {
        for (int j = 0; j < m_vecLayers[i].m_iNumNeurons; ++j)
        {
            // One weight per input (the bias is already counted in
            // m_iNumInputs, which is iNumInputs + 1).
            weights += m_vecLayers[i].m_vecNeurons[j].m_iNumInputs;
        }
    }
    return weights;
}
// Logistic activation function: maps `activation` into (0, 1).
// `response` scales the steepness of the curve — a smaller response
// gives a steeper transition around zero.
double CNeuralNet::Sigmoid(double activation, double response)
{
    const double scaled = -activation / response;
    return 1.0 / (1.0 + exp(scaled));
}
void CNeuralNet::OutputWeight()
{
ofstream weight("weight.txt");
int numofweights=0;
for(int i=0; i<m_iNumHiddenLayers+1; i++)
{
for(int n=0; n<m_vecLayers[i].m_iNumNeurons; n++)
{
for(int k=0; k<m_vecLayers[i].m_vecNeurons[n].m_iNumInputs; k++)
{
weight<<m_vecLayers[i].m_vecNeurons[n].m_vecWeight[k]<<endl;
numofweights++;
}
}
}
weight<<"权值数目:"<<numofweights;
}
// NOTE(review): the original paste ended with web code-viewer chrome
// (a shortcut-key help panel: copy, search, fullscreen, theme, font
// size). It was not C++ and broke compilation; commented out here.