node.h
/*
*
* Copyright 2004-2006 Ghassan OREIBY
*
* This file is part of Neurality.
*
* Neurality is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Neurality is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Neurality; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
*
*/
#pragma once
#ifndef NODE_H
#define NODE_H
#include "NeuralNetwork.h"
// The starting value for the trial-independent learning rate.
//#define ETA .9f
//#define ALPHA .75f
//#define BETA .2f
//#define KAPPA .01f
//#define XI .7f
typedef float real; // Special type so I can change it later for precision reasons.
#define MAX_WEIGHT 100.0f
#define MIN_WEIGHT -100.0f
#define DIFF 0.95f
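// --- Illustrative sketch (not part of the original header) ----------------------
// MAX_WEIGHT / MIN_WEIGHT look like bounds for the connection weights; the weight
// update presumably clamps against them roughly as below. ClampWeight is a
// hypothetical helper added here only for illustration.
static inline real ClampWeight(real reWeight)
{
	if (reWeight > MAX_WEIGHT) return MAX_WEIGHT;
	if (reWeight < MIN_WEIGHT) return MIN_WEIGHT;
	return reWeight;
}
// ---------------------------------------------------------------------------------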
CLASSEXPORT class CLASS_DECLSPEC CNode;
struct NodeStruct // Structure describing which node the current node is connected to and with which weight (a traversal sketch follows the struct).
{
CNode *Node; // The node connected (backward) to this node, i.e. the one providing the input
real reWeight; // The weight of this connection
real reOldWeight; // The old weight difference (used for back propagation)
real reEta; // The learning-rate parameter (individual to each adjustable network parameter)
real rePartDer; // The current value of the partial derivative of the error surface
real reOldPartDer; // The previous value of the partial derivative
real reSji; // Auxiliary term (presumably used by the adaptive learning-rate update)
real reOldSji; // The previous value of reSji
NodeStruct *Next; // Pointer to the next connection record in the list
};
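// --- Illustrative sketch (not part of the original header) ----------------------
// The NodeStruct entries form a singly linked list of backward connections; a
// lookup over that list would typically walk the Next pointers as below.
// FindConnection is a hypothetical helper added only for illustration.
static inline NodeStruct* FindConnection(NodeStruct* head, const CNode* source)
{
	for (NodeStruct* entry = head; entry != 0; entry = entry->Next)
		if (entry->Node == source)
			return entry; // connection from 'source' found
	return 0;                 // no such connection
}
// ---------------------------------------------------------------------------------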
struct WeightStruct // Structure that holds all parameters required for a connection so that learning can be resumed later (a snapshot sketch follows the struct)
{
real reWeight; // The weight of this connection
real reOldWeight; // The old weight difference (used for back propagation)
real reEta; // The learning-rate parameter (individual to each adjustable network parameter)
real rePartDer; // The current value of the partial derivative of the error surface
real reOldPartDer; // The previous value of the partial derivative
real reSji; // Auxiliary term (presumably used by the adaptive learning-rate update)
real reOldSji; // The previous value of reSji
};
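// --- Illustrative sketch (not part of the original header) ----------------------
// WeightStruct mirrors the learning-related fields of NodeStruct, so a
// connection's state can be copied out (e.g. before saving a network) and later
// fed back through MakeConnection(backNode, &snapshot). SnapshotConnection is a
// hypothetical helper added only for illustration.
static inline WeightStruct SnapshotConnection(const NodeStruct& connection)
{
	WeightStruct params;
	params.reWeight     = connection.reWeight;
	params.reOldWeight  = connection.reOldWeight;
	params.reEta        = connection.reEta;
	params.rePartDer    = connection.rePartDer;
	params.reOldPartDer = connection.reOldPartDer;
	params.reSji        = connection.reSji;
	params.reOldSji     = connection.reOldSji;
	return params;
}
// ---------------------------------------------------------------------------------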
// This is the class that represents a node (or soma) in our Neural Network.
class CNode
{
friend class CLayer;
friend class CNetwork;
private:
//static real ETA, ALPHA, BETA, KAPPA, XI;
// The final output of the Node (or Soma)
real reOutput;
// The list of backward connections to the current node (head of the NodeStruct list).
NodeStruct* nodes;
// The linear sum of the node's inputs (before the activation function)
real reLinearSum;
// The error factor
real reError;
// The error gradient
real reDelta;
// The activation function of the current node (a sketch of a typical choice follows the class)
real ActivationFunction(const real &reLinearSum);
// Compute the weighted (linear) sum of the inputs arriving through the given connection list
real GetLinearSum(const NodeStruct* const nodestruct);
// The number of connections for this node
int nConnection;
protected:
CNode(void); //Class Constructor.
~CNode(void); //Class Destructor.
// Return the output of the node (to be used by other nodes).
real GetNodeOutput(void) const;
// Add a backward connection
bool MakeConnection(const CNode* const backNode, real reWeight = 0.25f);
// Edit the weight of the connection with the parameter node.
bool EditConnection(const CNode* const Node, real reWeight);
// Remove the Connection with the parameter Node.
bool RemoveConnection(const CNode* const Node);
// Compute the corresponding output
real ComputeOutput(void);
// Gives the ability to set the output directly (for input nodes)
void SetOutput(real reOut);
// Return the value of the error gradient (for the learning algorithm)
real GetDelta(void) const;
// Propagate the error backward
void backPropagateError(void);
// Sets the error (reError) to zero
void ResetError(void);
// Add the given error to the current error (used by the back-propagation learning algorithm)
void AddToError(const real &reAddedError);
// Calculate the corresponding value of delta (to get the corrected weight value)
void ComputeDelta(void);
// Corrects the Weights for all connected nodes (back connected)
void CorrectWeights(void);
// Corrects eta (the individual learning-rate parameter used by the accelerated back-propagation algorithm)
void CorrectEta(NodeStruct* tempNodeStructure);
// Get the array of weights for the corresponding node
int GetWeights(real **preWeights) const;
// Adjust the control parameters (for the learning process)
static void AdjustLearnParam(real fEta = .9f, real fAlpha = .75f, real fBeta = .2f,
real fKappa = .01f, real fXi = .7f);
// Get the control parameters (for the learning process); a usage sketch follows the header
public:
static void GetLearnParam(real& fEta, real& fAlpha, real& fBeta, real& fKappa, real& fXi);
// Add a backward connection
protected:
bool MakeConnection(const CNode* const backNode, const WeightStruct* const NodeParams);
// Same as GetWeights(real**), except that it returns the full set of connection parameters, not just the weights
int GetWeights(WeightStruct** NodeParams) const;
};
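// --- Illustrative sketch (not part of the original header) ----------------------
// ActivationFunction() is only declared above; its definition lives in the
// corresponding .cpp file. A common choice for such a squashing function is the
// logistic sigmoid sketched below; whether Neurality actually uses it is an
// assumption, not something this header states.
#include <cmath>
static inline real SigmoidSketch(const real &reLinearSum)
{
	return 1.0f / (1.0f + std::exp(-reLinearSum));
}
// ---------------------------------------------------------------------------------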
#endif // NODE_H
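// --- Illustrative usage sketch (not part of the original header) ----------------
// GetLearnParam() is the only public member, so code outside the friend classes
// CLayer/CNetwork can still read the global learning parameters.
// ReadLearnParamSketch is a hypothetical helper showing that call.
static inline void ReadLearnParamSketch(real &eta, real &alpha, real &beta,
                                        real &kappa, real &xi)
{
	// Fills the five control parameters of the learning process.
	CNode::GetLearnParam(eta, alpha, beta, kappa, xi);
}
// ---------------------------------------------------------------------------------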