
📄 neuralnetwork.cpp

📁 This file implements and models an artificial neural network, trained with the BP (backpropagation) algorithm.
💻 CPP
// NeuralNetwork.cpp: implementation of the CNeuralNetwork class.
//
//////////////////////////////////////////////////////////////////////

#include "NeuralNetwork.h"

#include <time.h>
#include <stdlib.h>
#include <math.h>	// for exp() in sigmoid()
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

CNeuralNetwork::CNeuralNetwork()
{
}



// CONSTRUCTOR
// -----------------------------------------------------------------------------------
// INPUT:
// nl       : number of layers in the network,
//            e.g. input layer + 2 hidden layers + output layer gives nl=4
// *sz      : pointer to an array holding the size (neuron count) of each layer
// b        : learning rate (stored as beta)
// a        : momentum parameter (stored as alpha)
CNeuralNetwork::CNeuralNetwork(int nl, int *sz, double b, double a):beta(b),alpha(a),numl(nl)
{
	//--set the size of each layer--
	lsize=new int[numl];

	for(int i=0;i<numl;i++)
	{
		lsize[i]=sz[i];
	}
	
	//--allocate memory for output of each neuron--
	out = new double*[numl];
	
	for(int i=0;i<numl;i++){
		out[i]=new double[lsize[i]];
	}
	
	//--allocate memory for delta--
	delta = new double*[numl];
	
	for(int i=1;i<numl;i++){
		delta[i]=new double[lsize[i]];
	}
	
	//--allocate memory for weights--
	weight = new double**[numl];
	
	for(int i=1;i<numl;i++){
		weight[i]=new double*[lsize[i]];
	}
	
	for(int i=1;i<numl;i++){
		for(int j=0;j<lsize[i];j++){
			weight[i][j]=new double[lsize[i-1]+1];
		}
	}
	
	//--allocate memory for previous weights--
	prevDwt = new double**[numl];
	
	for(int i=1;i<numl;i++){
		prevDwt[i]=new double*[lsize[i]];
	}
	for(int i=1;i<numl;i++){
		for(int j=0;j<lsize[i];j++){
			prevDwt[i][j]=new double[lsize[i-1]+1];
		}
	}
	
	//--seed and assign random weights--
	srand((unsigned)(time(NULL)));
	for(int i=1;i<numl;i++)
		for(int j=0;j<lsize[i];j++)
			for(int k=0;k<lsize[i-1]+1;k++)
				weight[i][j][k]=(double)(rand())/(RAND_MAX/2) - 1;	// roughly uniform in [-1,1]
			
	//	initialize previous weights to 0 for first iteration
	for(int i=1;i<numl;i++)
		for(int j=0;j<lsize[i];j++)
			for(int k=0;k<lsize[i-1]+1;k++)
				prevDwt[i][j][k]=0.0;
}
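
// A minimal construction sketch (the values here are illustrative, not from the
// original file): a 2-3-1 network with learning rate 0.3 and momentum 0.1.
//   int sz[] = { 2, 3, 1 };
//   CNeuralNetwork nn(3, sz, 0.3, 0.1);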

// DESTRUCTOR
// -----------------------------------------------------------------------------------
CNeuralNetwork::~CNeuralNetwork()
{
	for(int i=0;i<numl;i++)
		delete[] out[i];
	delete[] out;

	for(int i=1;i<numl;i++)
		delete[] delta[i];
	delete[] delta;

	for(int i=1;i<numl;i++)
		for(int j=0;j<lsize[i];j++)
			delete[] weight[i][j];
	for(int i=1;i<numl;i++)
		delete[] weight[i];
	delete[] weight;

	for(int i=1;i<numl;i++)
		for(int j=0;j<lsize[i];j++)
			delete[] prevDwt[i][j];
	for(int i=1;i<numl;i++)
		delete[] prevDwt[i];
	delete[] prevDwt;

	// lsize is freed last because the loops above still index into it
	delete[] lsize;
}


// FORWARD PASS (also used to run a trained network)
// ------------------------------------------------------
// INPUT:
// in       : one row (sample) of the input data
// OUTPUT:
// out      : activation of every neuron, stored in the member array out
void CNeuralNetwork::ffwd(double *in)
{
	double sum;	// weighted-sum input to the sigmoid function
	
	//--assign content to input layer--
	for(int i=0;i<lsize[0];i++)
	{
		out[0][i]=in[i];
	}

	//--compute each neuron's activation with the sigmoid function--
	for(int i=1;i<numl;i++)
	{
		for(int j=0;j<lsize[i];j++)
		{	
			sum=0.0;
			for(int k=0;k<lsize[i-1];k++)
			{
				sum+= out[i-1][k]*weight[i][j][k];
			}
			sum+=weight[i][j][lsize[i-1]];	// add the bias weight
			out[i][j]=sigmoid(sum);	
		}
	}
}
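
// For reference, the forward pass above computes, for each neuron j in layer i:
//   out[i][j] = sigmoid( sum_k( out[i-1][k]*weight[i][j][k] ) + weight[i][j][lsize[i-1]] )
// i.e. the extra weight at index lsize[i-1] serves as the neuron's bias.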

// TRAIN THE NETWORK ON ONE SAMPLE
// -------------------------------------------------
// Learning algorithm: steepest (gradient) descent with momentum
// INPUT:
// in          : one row (sample) of the input data
// tgt         : target (expected) output values
void CNeuralNetwork::bpgt(double *in, double *tgt)
{
	double sum;

	ffwd(in);
	
	//--find delta for output layer--
	for(int i=0;i<lsize[numl-1];i++)
	{
		delta[numl-1][i]=out[numl-1][i]*(1-out[numl-1][i])*(tgt[i]-out[numl-1][i]);
	}
	
	//--find delta for hidden layers--	
	for(int i=numl-2;i>0;i--)
	{
		for(int j=0;j<lsize[i];j++)
		{
			sum=0.0;
			for(int k=0;k<lsize[i+1];k++)
			{
				sum+=delta[i+1][k]*weight[i+1][k][j];
			}
			delta[i][j]=out[i][j]*(1-out[i][j])*sum;
		}
	}
	
	//--apply momentum ( does nothing if alpha=0 )--
	for(int i=1;i<numl;i++)
	{
		for(int j=0;j<lsize[i];j++)
		{
			for(int k=0;k<lsize[i-1];k++)
			{
				weight[i][j][k]+=alpha*prevDwt[i][j][k];
			}
			weight[i][j][lsize[i-1]]+=alpha*prevDwt[i][j][lsize[i-1]];
		}
	}
	
	//	adjust weights using steepest descent	
	for(int i=1;i<numl;i++)
	{
		for(int j=0;j<lsize[i];j++)
		{
			for(int k=0;k<lsize[i-1];k++)
			{
				prevDwt[i][j][k]=beta*delta[i][j]*out[i-1][k];
				weight[i][j][k]+=prevDwt[i][j][k];
			}
			prevDwt[i][j][lsize[i-1]]=beta*delta[i][j];
			weight[i][j][lsize[i-1]]+=prevDwt[i][j][lsize[i-1]];
		}
	}
}
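
// Summary of the update rule implemented above (standard BP with momentum):
//   output layer : delta = o*(1-o)*(t-o)
//   hidden layer : delta = o*(1-o)*sum_k( delta[k]*w[k] )
//   weight change: dw = beta*delta*out_prev + alpha*dw_prev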

// ERROR FUNCTION
// -----------------------------------
// INPUT:
// tgt      : target (expected) output values
// OUTPUT:
// returns the sum of squared output errors divided by 2 (the usual BP cost
// function; despite the name, it is not averaged over the outputs)
double CNeuralNetwork::mse(double *tgt) const
{
	double mse=0;
	for(int i=0;i<lsize[numl-1];i++)
	{
		mse+=(tgt[i]-out[numl-1][i])*(tgt[i]-out[numl-1][i]);
	}
	return mse/2;
}

// OUTPUT OF A GIVEN OUTPUT-LAYER NEURON
//-------------------------------------
double CNeuralNetwork::Out(int i) const
{
	return out[numl-1][i];
}

// SIGMOID FUNCTION
double CNeuralNetwork::sigmoid(double in)
{
	return 1/(1+exp(-in));
}
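
// ------------------------------------------------------------------
// A minimal end-to-end sketch (not part of the original file): training the
// network on XOR. It assumes NeuralNetwork.h declares the members used above;
// the layer sizes, epoch count, and learning parameters are illustrative.
// Guarded by NN_DEMO so the file still builds as a plain implementation unit.
#ifdef NN_DEMO
#include <stdio.h>

int main()
{
	// four XOR samples, laid out as { in1, in2, target }
	double data[4][3] = { {0,0,0}, {0,1,1}, {1,0,1}, {1,1,0} };

	int sz[] = { 2, 3, 1 };               // 2 inputs, 3 hidden neurons, 1 output
	CNeuralNetwork nn(3, sz, 0.3, 0.1);   // beta = learning rate, alpha = momentum

	// train: one backprop step per sample, repeated over many epochs
	for(int ep=0; ep<20000; ep++)
		for(int s=0; s<4; s++)
			nn.bpgt(data[s], &data[s][2]);

	// test: run a forward pass, then read the single output neuron
	for(int s=0; s<4; s++)
	{
		nn.ffwd(data[s]);
		printf("%g XOR %g -> %g\n", data[s][0], data[s][1], nn.Out(0));
	}
	return 0;
}
#endif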
