node.cpp (Function Approximation, C++)

/*
 * 
 *	 Copyright 2004-2006 Ghassan OREIBY
 *   
 * 	 This file is part of Neurality.
 *
 *   Neurality is free software; you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation; either version 2 of the License, or
 *   (at your option) any later version.
 *
 *   Neurality is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *   GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *   along with Neurality; if not, write to the Free Software
 *   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 *
 * 
 */


#include "NeuralNetwork.h"
#include "node.h"


//real	CNode::ETA		= .9f;
//real	CNode::ALPHA	= .75f;
//real	CNode::BETA		= .2f;
//real	CNode::KAPPA	= .01f;
//real	CNode::XI		= .07f;
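// Learning parameters used by the backpropagation code below:
//   ETA   - initial per-connection learning rate (copied into reEta when a connection is made)
//   ALPHA - momentum coefficient applied to the previous weight change
//   BETA, KAPPA, XI - adaptive learning-rate parameters used by CorrectEta()
//                     (KAPPA: additive increase, BETA: multiplicative decrease,
//                      XI: smoothing factor for the averaged gradient reSji)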
real ETA	= .9f;
real ALPHA	= .75f;
real BETA	= .2f;
real KAPPA	= .01f;
real XI		= .7f;


CNode::CNode(void) : nodes(NULL), reOutput(0), reLinearSum(0), reError(0), reDelta(0), nConnection(0)
{
}

CNode::~CNode(void)
{
	NodeStruct	*nodestruct = nodes;

	while (nodes != NULL)
	{
		nodestruct = nodes->Next;
		delete nodes;
		nodes = nodestruct;
	}
}


// Return the output of the node (to be used by other nodes).
real CNode::GetNodeOutput(void) const
{
	return reOutput;
}

// Add a backward connection
bool CNode::MakeConnection(const CNode* const backNode, real reWeight)
{
	NodeStruct	*tempnodestruct = NULL, *nodestruct = nodes;
	//tempnodestruct = nodes;
	while (nodestruct	!= NULL)
	{
		if (nodestruct->Node == backNode)
			return false;	// a connection to this node already exists
		tempnodestruct = nodestruct;
		nodestruct = nodestruct->Next;
	}
	nodestruct = new NodeStruct;
	nodestruct->Next = NULL;
	nodestruct->Node = const_cast<CNode *>(backNode);
	nodestruct->reWeight = reWeight;
	nodestruct->reEta = ETA;
	nodestruct->reOldWeight = 0;
	nodestruct->rePartDer = 0;
	nodestruct->reOldPartDer = 0;
	nodestruct->reSji = 0;
	nodestruct->reOldSji = 0;
	if (tempnodestruct != NULL)
		tempnodestruct->Next = nodestruct;
	else nodes = nodestruct;
	nConnection++;
	return true;				// the requested connection was added successfully
}

// Edit the weight of the connection with the parameter node.
bool CNode::EditConnection(const CNode* const Node, real reWeight)
{
	NodeStruct	*tempnodestruct;
	tempnodestruct = nodes;
	while (tempnodestruct != NULL)
	{
		if (tempnodestruct->Node == Node)
		{
			tempnodestruct->reWeight = reWeight;
			return true;	// the weight was updated successfully
		}
		tempnodestruct = tempnodestruct->Next;
	}
	return false;				// the requested node is not connected, so nothing was changed
}

// Remove the connection with the parameter Node.
bool CNode::RemoveConnection(const CNode* const Node)
{
	NodeStruct	*tempnodestruct = nodes, *nodestruct;

	if (tempnodestruct == NULL)
		return false;

	// The first connection in the list matches.
	if (tempnodestruct->Node == Node)
	{
		nodes = tempnodestruct->Next;
		delete tempnodestruct;
		nConnection--;
		return true;	// the requested connection was removed successfully
	}

	// Search the remainder of the list.
	while (tempnodestruct->Next != NULL)
	{
		if (tempnodestruct->Next->Node == Node)
		{
			nodestruct = tempnodestruct->Next->Next;
			delete tempnodestruct->Next;
			tempnodestruct->Next = nodestruct;
			nConnection--;
			return true;	// the requested connection was removed successfully
		}
		tempnodestruct = tempnodestruct->Next;
	}
	return false;			// the requested node is not connected, so nothing was changed
}

// The Activation function of the current Node
real CNode::ActivationFunction(const real &reLinearsum)
{
	//reOutput = (pow(1.0 + exp(-float(reLinearsum)), -1.0));	// The Sigmoid function
	//return reOutput;
	return 1.0f / (1.0f + expf(-reLinearsum));	// the logistic sigmoid
}

// Get the weighted (linear) sum of the outputs of the back-connected nodes
real CNode::GetLinearSum(const NodeStruct* const	nodestruct)
{
	if (nodestruct != NULL)
		if (nodestruct->Next == NULL)
			reLinearSum = nodestruct->Node->GetNodeOutput() * nodestruct->reWeight;
		else reLinearSum = GetLinearSum(nodestruct->Next) + nodestruct->Node->GetNodeOutput() * nodestruct->reWeight;
	else reLinearSum = 0;
	return reLinearSum;
}

// Compute the corresponding output
real CNode::ComputeOutput(void)
{
	reOutput = ActivationFunction(GetLinearSum(nodes));
	return reOutput;
}

// Give The ability to set the output (for input nodes)
void CNode::SetOutput(real reOut)
{
	reOutput = reOut;
}


//////////////////////////////////////////////////////////
//		BackPropagation Part							//
//////////////////////////////////////////////////////////


// Return the value of the error gradient (for the learning algorithm)
real CNode::GetDelta(void) const
{
	return reDelta;
}

// Propagate the error backward
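// ComputeDelta() is called first; then, for every backward connection, the partial
// derivative accumulator rePartDer is decremented by delta times the connected node's
// output, and the weighted delta (reWeight * delta) is added to that node's error.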
void CNode::backPropagateError(void)
{
	NodeStruct	*tempNodeStruct = nodes;
    
	ComputeDelta();
	while (tempNodeStruct)
	{
		tempNodeStruct->rePartDer -= reDelta * tempNodeStruct->Node->GetNodeOutput();
		tempNodeStruct->Node->AddToError(tempNodeStruct->reWeight * reDelta);
		tempNodeStruct = tempNodeStruct->Next;
	}
}

// Sets the error (reError) to zero
void CNode::ResetError(void)
{
	reError = 0;
}

// Add error to Current error (used to backpropagate gradient method for learning)
void CNode::AddToError(const real &reAddedError)
{
	reError += reAddedError;
}

// Calculate the corresponding value of delta (to get the corrected weight value)
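// For the sigmoid activation, f'(net) = f(net) * (1 - f(net)) = reOutput * (1 - reOutput),
// so delta = f'(net) * accumulated error.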
void CNode::ComputeDelta(void)
{
	reDelta = reOutput * (1.0f - reOutput) * reError;
	//return reDelta;
}

// Correct the weights of all backward connections
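// The weight change is ALPHA times the previous change (stored in reOldWeight, i.e. a
// momentum term) minus the per-connection learning rate reEta times the accumulated
// gradient rePartDer. Weights of nearly saturated nodes (output above 0.98 or below
// 0.02) are scaled by DIFF, and every weight is clamped to [MIN_WEIGHT, MAX_WEIGHT].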
void CNode::CorrectWeights(void)
{
	NodeStruct	*tempNodeStruct = nodes;
	real		tempWeight;

	//ComputeDelta();
	while (tempNodeStruct)
	{
		CorrectEta(tempNodeStruct);
		tempWeight = ALPHA * tempNodeStruct->reOldWeight - tempNodeStruct->reEta * tempNodeStruct->rePartDer;
		tempNodeStruct->reWeight += tempWeight;
		if ((reOutput > 0.98f) || (reOutput < 0.02f))	tempNodeStruct->reWeight *= DIFF;
		tempNodeStruct->reWeight = max(MIN_WEIGHT, min(tempNodeStruct->reWeight, MAX_WEIGHT));
		tempNodeStruct->reOldWeight = tempWeight;
		tempNodeStruct->reOldPartDer = tempNodeStruct->rePartDer;
		tempNodeStruct->rePartDer = 0;
		tempNodeStruct = tempNodeStruct->Next;
	}

}

// Correct Eta (the per-connection learning rate used by the accelerated backpropagation algorithm)
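// This resembles the delta-bar-delta scheme: reSji holds an exponentially smoothed
// gradient (smoothing factor XI); when the smoothed and current gradients agree in
// sign, the learning rate grows additively by KAPPA, and when they disagree it is
// reduced multiplicatively by the factor (1 - BETA).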
void CNode::CorrectEta(NodeStruct* tempNodeStructure)
{
	real	reTemp;
	// NOTE: maybe reOldSji should be used a line further down
	reTemp = tempNodeStructure->reOldSji * tempNodeStructure->rePartDer;

	if (reTemp > 0) tempNodeStructure->reEta += KAPPA;
	else if (reTemp < 0) tempNodeStructure->reEta *= 1.0f - BETA;

	reTemp = tempNodeStructure->reSji;
	tempNodeStructure->reSji = (1.0f - XI) * tempNodeStructure->reOldPartDer + XI * tempNodeStructure->reSji;
	tempNodeStructure->reOldSji = reTemp;
}

// Get the array of weights for the corresponding node
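// The array is allocated here with new[]; the caller is responsible for delete[].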
int CNode::GetWeights(real **preWeights) const
{
	int		nReturn = 0;
	real	*ptempWeight = NULL;
	NodeStruct	*tempNodeStruct = nodes;

	//if (preWeights) return 0;	//The argument must be null, in order to return array into it

	ptempWeight = new real[nConnection];

	while(tempNodeStruct)
	{
		ptempWeight[nReturn++] = tempNodeStruct->reWeight;
		tempNodeStruct = tempNodeStruct->Next;
	}
	*preWeights = ptempWeight;
	
	return nReturn;
	//return nConnection;
}

// Adjust the control parameters (for the learning process)
void CNode::AdjustLearnParam(real fEta, real fAlpha, real fBeta, real fKappa, real fXi)
{
	ETA		= fEta;
	ALPHA	= fAlpha;
	BETA	= fBeta;
	KAPPA	= fKappa;
	XI		= fXi;
}


// Get the control parameters (for the learning process)
void CNode::GetLearnParam(real& fEta, real& fAlpha, real& fBeta, real& fKappa, real& fXi)
{
	fEta	= ETA;
	fAlpha	= ALPHA;
	fBeta	= BETA;
	fKappa	= KAPPA;
	fXi		= XI;
}

// Add a backward connection
bool CNode::MakeConnection(const CNode* const backNode, const WeightStruct* const NodeParams)
{
	NodeStruct	*tempnodestruct = NULL, *nodestruct = nodes;
	//tempnodestruct = nodes;
	while (nodestruct	!= NULL)
	{
		if (nodestruct->Node == backNode)
			return false;	// a connection to this node already exists
		tempnodestruct = nodestruct;
		nodestruct = nodestruct->Next;
	}
	nodestruct = new NodeStruct;
	nodestruct->Next = NULL;
	nodestruct->Node = const_cast<CNode *>(backNode);
	nodestruct->reWeight = NodeParams->reWeight;
	nodestruct->reEta = NodeParams->reEta;
	nodestruct->reOldWeight = NodeParams->reOldWeight;
	nodestruct->rePartDer = NodeParams->rePartDer;
	nodestruct->reOldPartDer = NodeParams->reOldPartDer;
	nodestruct->reSji = NodeParams->reSji;
	nodestruct->reOldSji = NodeParams->reOldSji;
	if (tempnodestruct != NULL)
		tempnodestruct->Next = nodestruct;
	else nodes = nodestruct;
	nConnection++;
	return true;				// the requested connection was added successfully
}

// Same as GetWeights(real**), except that it returns the full set of connection parameters (WeightStruct) rather than just the weights
int CNode::GetWeights(WeightStruct** NodeParams) const
{
	int				nReturn = 0;
	WeightStruct	*ptempWeight = NULL;
	NodeStruct		*tempNodeStruct = nodes;

	//if (preWeights) return 0;	//The argument must be null, in order to return array into it

	ptempWeight = new WeightStruct[nConnection];

	while(tempNodeStruct)
	{
		ptempWeight[nReturn].reEta			= tempNodeStruct->reEta;
		ptempWeight[nReturn].reOldPartDer	= tempNodeStruct->reOldPartDer;
		ptempWeight[nReturn].reOldSji		= tempNodeStruct->reOldSji;
		ptempWeight[nReturn].reOldWeight	= tempNodeStruct->reOldWeight;
		ptempWeight[nReturn].rePartDer		= tempNodeStruct->rePartDer;
		ptempWeight[nReturn].reSji			= tempNodeStruct->reSji;
		ptempWeight[nReturn++].reWeight		= tempNodeStruct->reWeight;

		tempNodeStruct = tempNodeStruct->Next;
	}
	*NodeParams = ptempWeight;
	
	return nReturn;
}
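
// ---------------------------------------------------------------------------
// For orientation: a minimal usage sketch of one training step on a three-node
// chain (input -> hidden -> output), using only the public CNode methods defined
// above. The driver TrainOneStep, the initial weights, and the error-sign
// convention (target minus output) are illustrative assumptions and are not part
// of Neurality itself, so the sketch is disabled with #if 0.
// ---------------------------------------------------------------------------
#if 0
void TrainOneStep(real in, real target)
{
	CNode input, hidden, output;

	input.SetOutput(in);                    // input nodes get their output set directly
	hidden.MakeConnection(&input, 0.1f);    // backward connections with initial weights
	output.MakeConnection(&hidden, 0.3f);

	hidden.ComputeOutput();                 // forward pass, from the inputs toward the output
	real y = output.ComputeOutput();

	input.ResetError();                     // clear the accumulated error terms
	hidden.ResetError();
	output.ResetError();
	output.AddToError(target - y);          // inject the output error (assumed sign convention)
	output.backPropagateError();            // accumulates rePartDer and pushes delta back to 'hidden'
	hidden.backPropagateError();

	output.CorrectWeights();                // apply the accumulated weight changes
	hidden.CorrectWeights();
}
#endif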
