// network.cpp (function approximation, C++)
/*
 * 
 *	 Copyright 2004-2006 Ghassan OREIBY
 *   
 * 	 This file is part of Neurality.
 *
 *   Neurality is free software; you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation; either version 2 of the License, or
 *   (at your option) any later version.
 *
 *   Neurality is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *   GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *   along with Neurality; if not, write to the Free Software
 *   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 *
 * 
 */


//#include "NeuralNetwork.h"
#include "network.h"


CNetwork::CNetwork(void)
: HiddenLayers(NULL)
, InputLayer(NULL)
, OutputLayer(NULL)
, nInputSize(0)
, nOutputSize(0)
, nHiddenLayers(0)
, ThresholdNode(NULL)
, reInputNormalizationMax(0)
, reInputNormalizationMin(0)
, reOutputNormalizationMax(0)
, reOutputNormalizationMin(0)
, nType(0)
, reNormSlopeIn(1.0f)
, reNormSlopeOut(1.0f)
, reNormTransIn(0)
, reNormTransOut(0)
, reNormMax(0.9f)
, reNormMin(0.1f)
, bState(false)
{
}


CNetwork::~CNetwork(void)
{
	// The hidden-layer array (and the layers it points to) is supplied by the
	// caller in the parameterized constructor and is not owned or deleted here.
	if (OutputLayer) delete OutputLayer;
	if (InputLayer) delete InputLayer;
	if (ThresholdNode) delete ThresholdNode;
	bState = false;
}

// Returns the size of the input layer
int CNetwork::GetInputSize(void) const
{
	return nInputSize;
}

// Returns the size of the output layer.
int CNetwork::GetOutputSize(void) const
{
	return nOutputSize;
}

// Computes the network's output for the given input and writes it into OutputArray
bool CNetwork::ComputeOutputforThisInputStandard(const real InputArray[], real *OutputArray) const
{
	NodeStructureWithoutWeight	*tempNodeStruct = InputLayer->pNodeStruct;
	//LayerStruct	*tempLayerStruct = HiddenLayers;
	int i=0;

	/*if ((nInSize != nInputSize) || (nOutSize != nOutputSize))
		return false;*/

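	// Feed the input values into the input-layer nodes.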
	while (tempNodeStruct)
	{
		tempNodeStruct->Node->SetOutput(InputArray[i++]);
		tempNodeStruct = tempNodeStruct->Next;
	}
    
	/*while (tempLayerStruct)
	{
		tempLayerStruct->pLayer->ComputeLayer();
		tempLayerStruct = tempLayerStruct->Next;
	}*/
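	// Propagate the signal forward through each hidden layer in order.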
	for (i = 0; i < nHiddenLayers; i++)
		HiddenLayers[i]->ComputeLayer();

	if (OutputLayer) OutputLayer->ComputeLayer();
	else return false;

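	// Read the computed outputs back from the output-layer nodes.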
	tempNodeStruct = OutputLayer->pNodeStruct;
	i=0;
	while (tempNodeStruct)
	{
		OutputArray[i++] = tempNodeStruct->Node->GetNodeOutput();
		tempNodeStruct = tempNodeStruct->Next;
	}

	return true;
}
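
/*
 * Usage sketch (illustrative only; assumes a network already built with
 * 2 inputs and 1 output, and caller-owned buffers sized with
 * GetInputSize()/GetOutputSize()):
 *
 *   real in[2]  = { 0.3f, 0.7f };
 *   real out[1] = { 0.0f };
 *   if (net.ComputeOutputforThisInputStandard(in, out))
 *       cout << "network output: " << out[0] << endl;
 */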

// Trains the network on the given input/output samples using backpropagation; returns the number of iterations run
int CNetwork::Learn(const int& nSamples, const real InputArray[], const real DesiredOutputArray[], 
					const real& reDesiredError, const int& nItrShow, const int& nItrSave, 
					HDC *ClientDC, RECT *rect, BOOL* pbContinue)
{
	NodeStructureWithoutWeight	*tempNodeStruct = NULL;
	real	reSqrCalculatedError, reError, *CalculatedOutput = new real[nOutputSize],
			**InArray = NULL, **OutArray = NULL, *tempNorm = NULL, reAlpha, reBeta;
	int		i = 0, j = 0, k = 0, itr = 0;	
	BOOL	bDontExist = FALSE;

    if (pbContinue == NULL) 
	{
		pbContinue = new BOOL;
		*pbContinue = TRUE;
		bDontExist = TRUE;
	}

	
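	// Copy the flat InputArray into one row per training sample.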
	InArray = new real*[nSamples];	
	for (i = 0; i < nSamples; i++)
	{
		InArray[i] = new real[nInputSize];
		for (j = 0; j < nInputSize; j++)
			InArray[i][j] = InputArray[i * nInputSize + j];
	}

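	// Normalize each input row in place (Normalize allocates tempNorm for the result).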
	for (i = 0; i < nSamples; i++)
	{
		Normalize(InArray[i], &tempNorm, 0);
		for (j = 0; j < nInputSize; j++)
			InArray[i][j] = tempNorm[j];
		delete[] tempNorm;
	}


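	// Do the same for the desired outputs: copy into rows, then normalize.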
	OutArray = new real*[nSamples];
	for (i = 0; i < nSamples; i++)
	{
		OutArray[i]= new real[nOutputSize];
		for (j = 0; j < nOutputSize; j++)
			OutArray[i][j] = DesiredOutputArray[i * nOutputSize + j];
	}

	for (i = 0; i < nSamples; i++)
	{
		Normalize(OutArray[i], &tempNorm, 1);
		for (j = 0; j < nOutputSize; j++)
			OutArray[i][j] = tempNorm[j];
		delete[] tempNorm;
	}

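	// Optional on-screen plot: reAlpha and reBeta map log(error) onto the vertical extent of the client rectangle.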
	if (ClientDC && rect)
	{
		reAlpha = (rect->bottom - rect->top) / (logf(reDesiredError) - logf(nOutputSize * 1.0f));
		reBeta = rect->bottom - reAlpha * logf(reDesiredError);

		for (i = 0; i <= (rect->right - rect->left) / 50; i++)
		{
			MoveToEx(*ClientDC, i * 50, 0 , NULL);
			LineTo(*ClientDC, i * 50, rect->bottom);
		}

		reError = 100.0f;
		j = 1;

		for (i = 1; (reError / j) >= (reDesiredError); i++)
		{
			j *= 10; 
			//MoveToEx(*ClientDC, 0, rect->bottom - i * 50, NULL);
			//LineTo(*ClientDC, rect->right, rect->bottom - i * 50);
			MoveToEx(*ClientDC, 0, reAlpha * logf(reError / j) + reBeta, NULL);
			LineTo(*ClientDC, rect->right, reAlpha * logf(reError / j) + reBeta);
		}

		MoveToEx(*ClientDC, 0, 0, NULL);
	}



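	// Main training loop: one pass (epoch) over all samples per iteration, with a batch weight update at the end of each pass.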
	do
	{
		reSqrCalculatedError = 0;
		itr++;

		for (k = 0; k < nSamples; k++)
		{		
			OutputLayer->ResetError();
			for(j = 0; j < nHiddenLayers; j++)
				HiddenLayers[nHiddenLayers - j - 1]->ResetError();
			ComputeOutputforThisInputStandard(InArray[k], CalculatedOutput);
			i = 0;
			tempNodeStruct = OutputLayer->pNodeStruct;
			while (tempNodeStruct)
			{
				tempNodeStruct->Node->AddToError(OutArray[k][i] - CalculatedOutput[i]);
				i++;
				tempNodeStruct = tempNodeStruct->Next;
			}
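			// Backpropagate the accumulated error from the output layer back through the hidden layers.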
			OutputLayer->backPropagateError();
			for(j = nHiddenLayers - 1; j >= 0; j--)
				HiddenLayers[j]->backPropagateError();
			for(j = 0; j < nOutputSize; j++)
			{
				reError = OutArray[k][j] - CalculatedOutput[j];
				reSqrCalculatedError +=  reError * reError / nSamples;
			}
		}
		OutputLayer->CorrectWeights();
		for(j = 0; j < nHiddenLayers; j++)
			HiddenLayers[nHiddenLayers - j - 1]->CorrectWeights();
		if (!(itr % nItrShow)) 
		{
			if (ClientDC && rect)	// plot the current error on the log scale prepared above
				LineTo(*ClientDC, itr /*/ 5*/, (int) (reAlpha * logf(reSqrCalculatedError) + reBeta));
			else
				cout<<"Iteration No\t"<<itr<<"\t"<<reSqrCalculatedError<<endl;
			if (itr >= (nItrSave * itr / nItrShow))
				SaveToFile("tempFile.nnf", true);
		}
	}while ((reSqrCalculatedError  > reDesiredError) && (itr < MAX_ITR) && (*pbContinue));  

	SaveToFile("Stopped.nnf", true);
	
	if (bDontExist) delete pbContinue;
	delete[] CalculatedOutput;
	for (i = 0; i < nSamples; i++)
	{
		delete[] InArray[i];	// free each per-sample row before the row-pointer arrays
		delete[] OutArray[i];
	}
	delete[] InArray;
	delete[] OutArray;

	return itr;
}
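
/*
 * Training sketch (illustrative only): InputArray is laid out sample-major,
 * i.e. nSamples * nInputSize values, and DesiredOutputArray likewise holds
 * nSamples * nOutputSize values.  Passing NULL for the drawing arguments makes
 * the routine report progress to the console every nItrShow iterations.
 *
 *   real in[4]  = { 0.0f, 0.0f,   1.0f, 1.0f };   // 2 samples, 2 inputs each
 *   real out[2] = { 0.0f, 1.0f };                 // 2 samples, 1 output each
 *   int itr = net.Learn(2, in, out, 0.001f, 100, 1000, NULL, NULL, NULL);
 */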

CNetwork::CNetwork(CLayer * HiddenLayer[], const int& nHiddenSize, const int& nInSize, 
				   const int& nOutSize, const real& reInNormMin, const real& reInNormMax,
				   const real& reOutNormMin, const real& reOutNormMax, const int& ntype)
: HiddenLayers(NULL), nHiddenLayers(nHiddenSize), nInputSize(nInSize), nOutputSize(nOutSize)
, reInputNormalizationMax(reInNormMax)
, reInputNormalizationMin(reInNormMin)
, reOutputNormalizationMax(reOutNormMax)
, reOutputNormalizationMin(reOutNormMin)
, reNormMax(0.9f)
, reNormMin(0.1f)
, nType(ntype)
, bState(false)
{
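	// The input and output layers (and the bias node) are created and owned here; hidden layers are taken from the caller.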
	InputLayer		= new CLayer(nInputSize);
	OutputLayer		= new CLayer(nOutputSize);
	ThresholdNode	= new CNode;

	SetNorm(reInNormMin, reInNormMax, reOutNormMin, reOutNormMax);
    
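	// The threshold node feeds a constant -1 into the output layer and every hidden layer, acting as a bias input.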
	ThresholdNode->SetOutput(-1.0f);
	OutputLayer->MakeConnection(ThresholdNode);

	if (nHiddenLayers)
	{	
		HiddenLayers = HiddenLayer;
		OutputLayer->MakeConnection(HiddenLayer[nHiddenLayers - 1]);
		for (int i = nHiddenLayers - 1; i >= 0 ; i--)
		{
			HiddenLayers[i]->MakeConnection(ThresholdNode);
			if (i != 0) HiddenLayers[i]->MakeConnection(HiddenLayers[i - 1]);
		}
		HiddenLayers[0]->MakeConnection(InputLayer);
	}
	else OutputLayer->MakeConnection(InputLayer);
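	// NN_MCL adds cross-layer (skip) connections: the output and deeper hidden layers are also wired directly to the input layer and to earlier hidden layers.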
	if (nType == NN_MCL)
	{
		if (nHiddenLayers != 0) OutputLayer->MakeConnection(InputLayer);
		for (int i = nHiddenLayers - 1; i > 0; i--)
		{
			if (i != 0) HiddenLayers[i]->MakeConnection(InputLayer);
			for (int j = 0; j <  i - 1; j++)
				HiddenLayers[i]->MakeConnection(HiddenLayers[j]);
			if (i != nHiddenLayers - 1) OutputLayer->MakeConnection(HiddenLayers[i]);
		}	
	}
	bState = true;
}
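
/*
 * Construction sketch (illustrative only; assumes CLayer(int) builds a layer
 * of the given size, as used above, and that a type value of 0 selects the
 * standard feed-forward topology rather than NN_MCL):
 *
 *   CLayer* hidden[1] = { new CLayer(4) };   // one hidden layer of 4 nodes
 *   CNetwork net(hidden, 1,                  // hidden layers and their count
 *                2, 1,                       // input / output sizes
 *                -1.0f, 1.0f,                // input normalization range
 *                 0.0f, 1.0f,                // output normalization range
 *                 0);                        // network type
 */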

// Sets the output value of the threshold (bias) node
void CNetwork::SetThresholdValue(real reValue)
{
	if (ThresholdNode) ThresholdNode->SetOutput(reValue);
}