network.cpp
{
ThresholdNode->SetOutput(reValue);
}
// Save the current Network to file with all required parameters
bool CNetwork::SaveToFile(char* FileName, bool bWithLearnParams)
{
// Works only with MLP networks for now; MCL support is planned.
ofstream FileOut(FileName, ios::binary);
if (!FileOut)
return false;
NeuralFileStruct FileStruct;
real *reWeights = NULL;
WeightStruct *Weights = NULL;
int iNodeNum;
strcpy(FileStruct.cType, (nType == NN_MCL) ? cMCLNetwork : cMLPNetwork);
FileStruct.cType[6] = '\0';
FileStruct.bWithLearnParams = bWithLearnParams;
FileStruct.nHiddenLayerNumber = nHiddenLayers;
FileStruct.nInputNeuroneNumber = nInputSize;
FileStruct.nOutputNeuroneNumber = nOutputSize;
FileStruct.reInputNormalizationMax = reInputNormalizationMax;
FileStruct.reInputNormalizationMin = reInputNormalizationMin;
FileStruct.reOutputNormalizationMax = reOutputNormalizationMax;
FileStruct.reOutputNormalizationMin = reOutputNormalizationMin;
FileOut.write((char *) &FileStruct, sizeof(NeuralFileStruct));
if (!bWithLearnParams)
{
for (int i = 0; i < nHiddenLayers; i++)
{
iNodeNum = HiddenLayers[i]->GetNodeNumber();
FileOut.write((char *) &iNodeNum, sizeof(int) * 1);
HiddenLayers[i]->GetWeights(&reWeights);
FileOut.write((char *) reWeights, sizeof(real) * ((((i == 0)? nInputSize : HiddenLayers[i - 1]->GetNodeNumber()) + 1) * iNodeNum));
delete[] reWeights;
}
iNodeNum = OutputLayer->GetNodeNumber();
FileOut.write((char *) &iNodeNum, sizeof(int) * 1);
OutputLayer->GetWeights(&reWeights);
FileOut.write((char *) reWeights, sizeof(real) * (((nHiddenLayers ? HiddenLayers[nHiddenLayers - 1]->GetNodeNumber() : nInputSize) + 1) * iNodeNum));
delete[] reWeights;
}
else
{
for (int i = 0; i < nHiddenLayers; i++)
{
iNodeNum = HiddenLayers[i]->GetNodeNumber();
FileOut.write((char *) &iNodeNum, sizeof(int) * 1);
HiddenLayers[i]->GetWeights(&Weights);
FileOut.write((char *) Weights, sizeof(WeightStruct) * ((((i == 0)? nInputSize : HiddenLayers[i - 1]->GetNodeNumber()) + 1) * iNodeNum));
delete[] Weights;
}
iNodeNum = OutputLayer->GetNodeNumber();
FileOut.write((char *) &iNodeNum, sizeof(int) * 1);
OutputLayer->GetWeights(&Weights);
FileOut.write((char *) Weights, sizeof(WeightStruct) * (((nHiddenLayers ? HiddenLayers[nHiddenLayers - 1]->GetNodeNumber() : nInputSize) + 1) * iNodeNum));
delete[] Weights;
}
FileOut.close();
return true;
}
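// A minimal usage sketch (hypothetical file names; assumes an already trained
// CNetwork instance named net):
//
//     net.SaveToFile("mlp.net", false);  // topology and raw weights only
//     net.SaveToFile("mlp.lrn", true);   // topology plus full WeightStruct state
//
// The on-disk layout is one NeuralFileStruct header, then, for each hidden
// layer and finally the output layer, an int node count followed by
// (fan-in + 1) * node-count weight entries; the extra entry per node is the
// weight of the ThresholdNode (bias) connection.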
// One-argument constructor that loads the network from file
CNetwork::CNetwork(char * FileName)
: bState(false)
, InputLayer(NULL)
, OutputLayer(NULL)
, HiddenLayers(NULL)
, ThresholdNode(NULL)
{
ifstream InFile(FileName, ios::binary);
NeuralFileStruct FileStruct;
real reWeight;
WeightStruct Weight;
int nLayerSize;
NodeStructureWithoutWeight *tempNodeStruct = NULL, *tempBackNodeStruct = NULL;
InFile.read((char *) &FileStruct, sizeof(NeuralFileStruct) * 1);
if (!InFile)
{
cout << "\nCan't open the file.\n";
return;
}
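// Only the MLP file format is handled by this loader for now.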
if (strcmp(FileStruct.cType, cMLPNetwork)) return;
nType = NN_MLP;
nHiddenLayers = FileStruct.nHiddenLayerNumber;
nInputSize = FileStruct.nInputNeuroneNumber;
nOutputSize = FileStruct.nOutputNeuroneNumber;
reInputNormalizationMax = FileStruct.reInputNormalizationMax;
reInputNormalizationMin = FileStruct.reInputNormalizationMin;
reOutputNormalizationMax = FileStruct.reOutputNormalizationMax;
reOutputNormalizationMin = FileStruct.reOutputNormalizationMin;
reNormMax = 0.9f;
reNormMin = 0.1f;
SetNorm(reInputNormalizationMin, reInputNormalizationMax, reOutputNormalizationMin, reOutputNormalizationMax);
ThresholdNode = new CNode;
ThresholdNode->SetOutput(-1.0f);
InputLayer = new CLayer(nInputSize);
OutputLayer = new CLayer(nOutputSize);
HiddenLayers = new CLayer*[nHiddenLayers];
if (!FileStruct.bWithLearnParams)
{
for (int i = 0; i < nHiddenLayers; i++)
{
InFile.read((char *) &nLayerSize, sizeof(int) * 1);
HiddenLayers[i] = new CLayer(nLayerSize);
tempNodeStruct = HiddenLayers[i]->pNodeStruct;
HiddenLayers[i]->pbackLayerList = new LayerStruct;
HiddenLayers[i]->pbackLayerList->Next = NULL;
HiddenLayers[i]->pbackLayerList->pLayer = i ? HiddenLayers[i - 1] : InputLayer;
while (tempNodeStruct)
{
tempBackNodeStruct = (i) ? HiddenLayers[i - 1]->pNodeStruct : InputLayer->pNodeStruct;
InFile.read((char *) &reWeight, sizeof(real) * 1);
tempNodeStruct->Node->MakeConnection(ThresholdNode, reWeight);
while (tempBackNodeStruct)
{
InFile.read((char *) &reWeight, sizeof(real) * 1);
tempNodeStruct->Node->MakeConnection(tempBackNodeStruct->Node, reWeight);
tempBackNodeStruct = tempBackNodeStruct->Next;
}
tempNodeStruct = tempNodeStruct->Next;
}
}
InFile.read((char *) &nLayerSize, sizeof(int) * 1);
tempNodeStruct = OutputLayer->pNodeStruct;
OutputLayer->pbackLayerList = new LayerStruct;
OutputLayer->pbackLayerList->Next = NULL;
OutputLayer->pbackLayerList->pLayer = nHiddenLayers ? HiddenLayers[nHiddenLayers - 1] : InputLayer;
while (tempNodeStruct)
{
tempBackNodeStruct = nHiddenLayers ? HiddenLayers[nHiddenLayers - 1]->pNodeStruct : InputLayer->pNodeStruct;
InFile.read((char *) &reWeight, sizeof(real) * 1);
tempNodeStruct->Node->MakeConnection(ThresholdNode, reWeight);
while (tempBackNodeStruct)
{
InFile.read((char *) &reWeight, sizeof(real) * 1);
tempNodeStruct->Node->MakeConnection(tempBackNodeStruct->Node, reWeight);
tempBackNodeStruct = tempBackNodeStruct->Next;
}
tempNodeStruct = tempNodeStruct->Next;
}
}
else
{
for (int i = 0; i < nHiddenLayers; i++)
{
InFile.read((char *) &nLayerSize, sizeof(int) * 1);
HiddenLayers[i] = new CLayer(nLayerSize);
tempNodeStruct = HiddenLayers[i]->pNodeStruct;
HiddenLayers[i]->pbackLayerList = new LayerStruct;
HiddenLayers[i]->pbackLayerList->Next = NULL;
HiddenLayers[i]->pbackLayerList->pLayer = i ? HiddenLayers[i - 1] : InputLayer;
while (tempNodeStruct)
{
tempBackNodeStruct = (i) ? HiddenLayers[i - 1]->pNodeStruct : InputLayer->pNodeStruct;
InFile.read((char *) &Weight, sizeof(WeightStruct) * 1);
tempNodeStruct->Node->MakeConnection(ThresholdNode, &Weight);
while (tempBackNodeStruct)
{
InFile.read((char *) &Weight, sizeof(WeightStruct) * 1);
tempNodeStruct->Node->MakeConnection(tempBackNodeStruct->Node, &Weight);
tempBackNodeStruct = tempBackNodeStruct->Next;
}
tempNodeStruct = tempNodeStruct->Next;
}
}
InFile.read((char *) &nLayerSize, sizeof(int) * 1);
tempNodeStruct = OutputLayer->pNodeStruct;
OutputLayer->pbackLayerList = new LayerStruct;
OutputLayer->pbackLayerList->Next = NULL;
OutputLayer->pbackLayerList->pLayer = nHiddenLayers ? HiddenLayers[nHiddenLayers - 1] : InputLayer;
while (tempNodeStruct)
{
tempBackNodeStruct = nHiddenLayers ? HiddenLayers[nHiddenLayers - 1]->pNodeStruct : InputLayer->pNodeStruct;
InFile.read((char *) &Weight, sizeof(WeightStruct) * 1);
tempNodeStruct->Node->MakeConnection(ThresholdNode, &Weight);
while (tempBackNodeStruct)
{
InFile.read((char *) &Weight, sizeof(WeightStruct) * 1);
tempNodeStruct->Node->MakeConnection(tempBackNodeStruct->Node, &Weight);
tempBackNodeStruct = tempBackNodeStruct->Next;
}
tempNodeStruct = tempNodeStruct->Next;
}
}
InFile.close();
bState = true;
}
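// A minimal load-and-run sketch (hypothetical file name and layer sizes):
//
//     CNetwork net("mlp.net");
//     if (net.GetState())  // true only if the file was read successfully
//     {
//         real in[2] = { 0.3f, 0.7f }, out[1];
//         net.ComputeOutput(in, out);
//     }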
// Normalize a vector of real values (nInOrOut = 0 for an input vector, 1 for an output vector)
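// The result buffer is allocated here with new[]; the caller owns it and must
// delete[] it. DeNormalize below behaves the same way.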
void CNetwork::Normalize(const real reToNormalize[], real ** reNormalized, const int &nInOrOut) const
{
int i, nSize = nInOrOut ? nOutputSize : nInputSize;
real *reTempNorm = new real[nSize], reAlpha, reBeta;
reAlpha = nInOrOut ? reNormSlopeOut : reNormSlopeIn;
reBeta = nInOrOut ? reNormTransOut : reNormTransIn;
for (i = 0; i < nSize; i++)
reTempNorm[i] = reToNormalize[i] * reAlpha + reBeta;
*reNormalized = reTempNorm;
}
// DeNormalize a vector of real values (nInOrOut = 0 for an input vector, 1 for an output vector)
void CNetwork::DeNormalize(const real reToDeNormalize[], real ** reDeNormalized, const int& nInOrOut) const
{
int i, nSize = nInOrOut ? nOutputSize : nInputSize;
real *reTempdeNorm = new real[nSize], reAlpha, reBeta;
reAlpha = nInOrOut ? reNormSlopeOut : reNormSlopeIn;
reBeta = nInOrOut ? reNormTransOut : reNormTransIn;
for (i = 0; i < nSize; i++)
reTempdeNorm[i] = (reToDeNormalize[i] - reBeta) / reAlpha;
*reDeNormalized = reTempdeNorm;
}
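// Normalize and DeNormalize form an exact inverse pair around the affine map
// y = reAlpha * x + reBeta. For example, with reNormMin = 0.1 and
// reNormMax = 0.9 (the values set in the loading constructor) and an input
// range [0, 100] passed to SetNorm:
//     reAlpha = (0.9 - 0.1) / (100 - 0)     = 0.008
//     reBeta  = (0.1 * 100 - 0.9 * 0) / 100 = 0.1
// so Normalize maps 50 to 0.008 * 50 + 0.1 = 0.5, and DeNormalize maps 0.5
// back to (0.5 - 0.1) / 0.008 = 50.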
// Compute output array and do the normalization/denormalization work
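// The caller must supply reOutputArray with room for nOutputSize values.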
void CNetwork::ComputeOutput(const real reInputArray[], real *reOutputArray) const
{
real *reNormIn = NULL, *reOut = new real[nOutputSize], *reDenormOut = NULL;
Normalize(reInputArray, &reNormIn, 0);
ComputeOutputforThisInputStandard(reNormIn, reOut);
DeNormalize(reOut, &reDenormOut, 1);
for (int i = 0; i < nOutputSize; i++)
reOutputArray[i] = reDenormOut[i];
delete[] reDenormOut;
delete[] reOut;
delete[] reNormIn;
}
// Set the normalization parameters
void CNetwork::SetNorm(const real& InNormMin, const real& InNormMax, const real& OutNormMin, const real& OutNormMax)
{
reInputNormalizationMax = InNormMax;
reInputNormalizationMin = InNormMin;
reOutputNormalizationMax = OutNormMax;
reOutputNormalizationMin = OutNormMin;
if (InNormMin == InNormMax)
{
reNormSlopeIn = 1;
reNormTransIn = 0;
}
else
{
reNormSlopeIn = (reNormMax - reNormMin) / (InNormMax - InNormMin);
reNormTransIn = (reNormMin * InNormMax - reNormMax * InNormMin) / (InNormMax - InNormMin);
}
if (OutNormMin == OutNormMax)
{
reNormSlopeOut = 1;
reNormTransOut = 0;
}
else
{
reNormSlopeOut = (reNormMax - reNormMin) / (OutNormMax - OutNormMin);
reNormTransOut = (reNormMin * OutNormMax - reNormMax * OutNormMin) / (OutNormMax - OutNormMin);
}
}
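// The slope/translation pair is the unique solution of the two endpoint
// conditions
//     reNormSlope * NormMin + reNormTrans = reNormMin
//     reNormSlope * NormMax + reNormTrans = reNormMax
// i.e. the affine map that sends [NormMin, NormMax] onto the working range
// [reNormMin, reNormMax] used inside the network (0.1 to 0.9 in the loading
// constructor).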
bool CNetwork::GetState(void) const
{
return bState;
}
// Adjust the Learning options for the Network
void CNetwork::AdjustLearning(const real& reEta, const real& reAlpha, const real& reBeta, const real& reKappa, const real& reXi)
{
CNode::AdjustLearnParam(reEta, reAlpha, reBeta, reKappa, reXi);
}
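// Note: this appears to be a thin wrapper over a static (class-wide) member
// of CNode, so the learning parameters are shared by every node and every
// network in the process; the meaning of each parameter is defined by CNode.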