// NetWork.cpp: implementation of the NetWork class.
//
//////////////////////////////////////////////////////////////////////
#include "stdafx.h"
#include <stdio.h>
#include "BPN.h"
#include "NetWork.h"
#include "Math.h"
#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[]=__FILE__;
#define new DEBUG_NEW
#endif
double sample[1000][6]={0};
INT Units [NUM_LAYERS] = {N, 10, M}; // 10 units in the hidden layer
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
NetWork::NetWork()
{
}
NetWork::~NetWork()
{
}
void NetWork::InitializeRandoms()
{
srand(4711); // fixed seed so every run is reproducible
}
INT NetWork::RandomEqualINT(INT Low, INT High)
{
return rand() % (High-Low+1) + Low;
}
double NetWork::RandomEqualREAL(double Low, double High)
{
return ((double) rand() / RAND_MAX) * (High-Low) + Low;
}
void NetWork::InitializeApplication(NET* Net)
{
Net->Alpha = 0.5;
Net->Eta = 0.05;
Net->Gain = 1;
f = fopen("BPN.txt", "w"); // log file for samples, weights, and predictions
}
void NetWork::FinalizeApplication(NET* Net)
{
fclose(f);
}
//=============================
void NetWork::ReadRecord()
{
char line[100];
FILE* stream;
int j = 0;
int matrix_l = 0;
if( (stream = fopen( "sample.txt", "r" )) != NULL ){
while( fgets( line, 100, stream ) != NULL ) // read one sample line per pass
{
char *str = line;
for( j=0; j<=5; j++ )
{ //ParseFirstDouble
bool bIsMin=false, bInDecimal=false, bCont=true;
double dRes=0.0,dFrac=1.0;
if( *str=='-' ) // leading minus sign
{
bIsMin=true;
str++;
}
while( *str != '\0' && bCont && *str!=' ')
{
if( *str=='.' )
bInDecimal = true; // switch to the fractional part
else if( bInDecimal && *str>='0' && *str<='9' )
{
dFrac=dFrac*10.0;
dRes += (double)(*str-'0')/dFrac; // accumulate fractional digits
}
else if( *str>='0' && *str<='9' )
dRes=dRes*10+(double)(*str-'0'); // accumulate integer digits
else{
bCont = false; // unexpected character ends the number
str++;
}
if( bCont ){
str++;
}
}
double d1=(bIsMin) ? -dRes : dRes;
switch(j) // scale each column by a fixed per-column factor
{
case 0: d1 = d1/5; break;
case 1: d1 = d1/180; break;
case 2: d1 = d1/180; break;
case 3: d1 = d1/180; break;
case 4: d1 = d1/10; break;
case 5: d1 = d1/1; break; // target column, left unscaled
// case 5: d1 = d1/(1+exp(-d1)); break;
default: break;
}
sample[matrix_l][j] = d1;
fprintf( f, "%f ", sample[matrix_l][j] );
while( *str==' ' )
{ // skip the spaces separating fields
str++;
}
if(*str=='\0')
continue; // line ended early; remaining columns parse as 0
}
matrix_l++; //add one line
fprintf(f,"\n");
}
fclose( stream ); // close only when the open succeeded
}
}
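/* Note: the hand-rolled digit loop above re-implements what strtod() from
   <stdlib.h> already provides. A minimal sketch of the same column loop
   using the standard library (same sample/f globals; the scale[] table is
   an illustrative restatement of the switch above, not part of this file):

   static const double scale[6] = { 5, 180, 180, 180, 10, 1 };
   for( j=0; j<=5; j++ ) {
       sample[matrix_l][j] = strtod( str, &str ) / scale[j]; // parse and advance
       fprintf( f, "%f ", sample[matrix_l][j] );
   }
*/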
//=============================
/******************************************************************************
I N I T I A L I Z A T I O N
******************************************************************************/
void NetWork::GenerateNetwork(NET* Net)
{
INT l,i;
Net->Layer = (LAYER**) calloc(NUM_LAYERS, sizeof(LAYER*));
for (l=0; l<NUM_LAYERS; l++) {
Net->Layer[l] = (LAYER*) malloc(sizeof(LAYER));
Net->Layer[l]->Units = Units[l];
Net->Layer[l]->Output = (double*) calloc(Units[l]+1, sizeof(double));
Net->Layer[l]->Error = (double*) calloc(Units[l]+1, sizeof(double));
Net->Layer[l]->Weight = (double**) calloc(Units[l]+1, sizeof(double*));
Net->Layer[l]->WeightSave = (double**) calloc(Units[l]+1, sizeof(double*));
Net->Layer[l]->dWeight = (double**) calloc(Units[l]+1, sizeof(double*));
Net->Layer[l]->Output[0] = BIAS;
if (l != 0) {
for (i=1; i<=Units[l]; i++) {
Net->Layer[l]->Weight[i] = (double*) calloc(Units[l-1]+1, sizeof(double));
Net->Layer[l]->WeightSave[i] = (double*) calloc(Units[l-1]+1, sizeof(double));
Net->Layer[l]->dWeight[i] = (double*) calloc(Units[l-1]+1, sizeof(double));
}
}
}
Net->InputLayer = Net->Layer[0];
Net->OutputLayer = Net->Layer[NUM_LAYERS - 1];
Net->Alpha = 0.9;
Net->Eta = 0.25;
Net->Gain = 1;
}
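/* Layout notes for the structure built above: index 0 of every Output
   array is reserved for the bias unit, whose activation is the constant
   BIAS, and Weight[i][j] holds the connection from unit j of layer l-1
   to unit i of layer l (j == 0 addresses the bias weight). The input
   layer owns no weight arrays, which is why the l != 0 guard skips it. */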
void NetWork::RandomWeights(NET* Net)
{
INT l,i,j;
for (l=1; l<NUM_LAYERS; l++) {
for (i=1; i<=Net->Layer[l]->Units; i++) {
for (j=1; j<=Net->Layer[l-1]->Units; j++) {
Net->Layer[l]->Weight[i][j] = RandomEqualREAL(-0.5, 0.5); // small random weights break symmetry
}
}
}
}
void NetWork::SetInput(NET* Net, double* Input)
{
INT i;
for (i=1; i<=Net->InputLayer->Units; i++) {
Net->InputLayer->Output[i] = Input[i-1];
}
}
void NetWork::GetOutput(NET* Net, double* Output) // copy the output layer into Output
{
INT i;
for (i=1; i<=Net->OutputLayer->Units; i++) {
Output[i-1] = Net->OutputLayer->Output[i];
}
}
/******************************************************************************
S U P P O R T F O R S T O P P E D T R A I N I N G
******************************************************************************/
void NetWork::SaveWeights(NET* Net)
{
INT l,i,j;
for (l=1; l<NUM_LAYERS; l++) {
fprintf(f, "\nLayer %d:\n", l);
for (i=1; i<=Net->Layer[l]->Units; i++) {
for (j=0; j<=Net->Layer[l-1]->Units; j++) {
Net->Layer[l]->WeightSave[i][j] = Net->Layer[l]->Weight[i][j];
fprintf(f, "[%d][%d]: %f ", i, j, Net->Layer[l]->WeightSave[i][j]);
}
fprintf(f, "\n");
}
}
}
void NetWork::RestoreWeights(NET* Net)
{
INT l,i,j;
for (l=1; l<NUM_LAYERS; l++) {
for (i=1; i<=Net->Layer[l]->Units; i++) {
for (j=0; j<=Net->Layer[l-1]->Units; j++) {
Net->Layer[l]->Weight[i][j] = Net->Layer[l]->WeightSave[i][j];
}
}
}
}
/******************************************************************************
P R O P A G A T I N G S I G N A L S
******************************************************************************/
void NetWork::PropagateLayer(NET* Net, LAYER* Lower, LAYER* Upper) // forward pass through one layer
{
INT i,j;
double Sum;
for (i=1; i<=Upper->Units; i++) {
Sum = 0;
for (j=0; j<=Lower->Units; j++) {
Sum += Upper->Weight[i][j] * Lower->Output[j]; // weighted sum of the lower layer's outputs
}
Upper->Output[i] = 1 / (1 + exp(-Net->Gain * Sum));
}
}
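/* In formula form, PropagateLayer computes for each upper-layer unit i
       o_i = sigmoid( Gain * sum_{j=0..n} w_ij * o_j ),
   where j == 0 is the bias term and sigmoid(x) = 1 / (1 + exp(-x)). */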
void NetWork::PropagateNet(NET* Net) // forward pass through the whole network
{
INT l;
for (l=0; l<NUM_LAYERS-1; l++) {
PropagateLayer(Net, Net->Layer[l], Net->Layer[l+1]); // layer l feeds layer l+1
}
}
/******************************************************************************
B A C K P R O P A G A T I N G E R R O R S
******************************************************************************/
void NetWork::ComputeOutputError(NET* Net, double* Target) // output deltas and half the sum of squared errors
{
INT i;
double Out, Err;
Net->Error = 0;
for (i=1; i<=Net->OutputLayer->Units; i++) {
Out = Net->OutputLayer->Output[i];
Err = Target[i-1]-Out;
Net->OutputLayer->Error[i] = Net->Gain * Out * (1-Out) * Err;
Net->Error += 0.5 * sqr(Err); // accumulate half the squared error
}
}
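/* The delta uses the derivative of the logistic activation, which for
   o = sigmoid(Gain * x) is Gain * o * (1 - o), giving
       Error[i] = Gain * o_i * (1 - o_i) * (t_i - o_i),
   while Net->Error accumulates E = 0.5 * sum_i (t_i - o_i)^2. */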
void NetWork::BackpropagateLayer(NET* Net, LAYER* Upper, LAYER* Lower) // push the error back one layer
{
INT i,j;
double Out, Err;
for (i=1; i<=Lower->Units; i++) {
Out = Lower->Output[i];
Err = 0;
for (j=1; j<=Upper->Units; j++) {
Err += Upper->Weight[j][i] * Upper->Error[j];
}
Lower->Error[i] = Net->Gain * Out * (1-Out) * Err;
}
}
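/* Hidden-layer deltas follow the chain rule: each lower unit collects the
   deltas of the upper units it feeds, weighted by the connecting weights,
   and scales the sum by the same logistic derivative:
       Error_lower[i] = Gain * o_i * (1 - o_i) * sum_j w_ji * Error_upper[j]. */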
void NetWork::BackpropagateNet(NET* Net) // push the error back through all layers
{
INT l;
for (l=NUM_LAYERS-1; l>1; l--) {
BackpropagateLayer(Net, Net->Layer[l], Net->Layer[l-1]);
}
}
void NetWork::AdjustWeights(NET* Net) // gradient-descent weight update with momentum
{
INT l,i,j;
double Out, Err, dWeight;
for (l=1; l<NUM_LAYERS; l++) {
for (i=1; i<=Net->Layer[l]->Units; i++) {
for (j=0; j<=Net->Layer[l-1]->Units; j++) {
Out = Net->Layer[l-1]->Output[j];
Err = Net->Layer[l]->Error[i];
dWeight = Net->Layer[l]->dWeight[i][j];
Net->Layer[l]->Weight[i][j] += Net->Eta * Err * Out + Net->Alpha * dWeight;
Net->Layer[l]->dWeight[i][j] = Net->Eta * Err * Out;
}
}
}
}
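/* This is gradient descent with momentum: the step applied to a weight is
       dw(t) = Eta * delta_i * o_j + Alpha * dw(t-1),
   with learning rate Eta and momentum factor Alpha. Note that dWeight
   stores only the Eta * delta_i * o_j term, so the momentum feeds back the
   previous gradient step rather than the full previous update (classical
   momentum would store the whole step). */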
/******************************************************************************
S I M U L A T I N G T H E N E T
******************************************************************************/
void NetWork::SimulateNet(NET* Net, double* Input, double* Output, double* Target, BOOL Training)
{
SetInput(Net, Input); // load the input layer
PropagateNet(Net); // forward pass
GetOutput(Net, Output); // read the network's output
ComputeOutputError(Net, Target); // deltas and half the sum of squared errors
if (Training) {
BackpropagateNet(Net); // backward pass
AdjustWeights(Net); // weight update
}
}
void NetWork::TrainNet(NET* Net)
{
INT n;
double Output[M];
for (n=0; n<TRAIN_YEARS; n++) {
SimulateNet(Net, &(sample[n][0]), Output, &(sample[n][5]), TRUE);
}
}
void NetWork::TestNet(NET* Net) // measure error on the training and test ranges
{
INT Year;
double Output[M];
TrainError = 0;
for (Year=TRAIN_LWB; Year<=TRAIN_UPB; Year++) {
SimulateNet(Net, &(sample[Year][0]), Output, &(sample[Year][5]), FALSE);
TrainError += Net->Error;
}
TestError = 0;
for (Year=TEST_LWB; Year<=TEST_UPB; Year++) {
SimulateNet(Net, &(sample[Year][0]), Output, &(sample[Year][5]), FALSE);
TestError += Net->Error;
}
}
void NetWork::EvaluateNet(NET* Net) // print the predictions
{
INT Year;
double Output [M];
fprintf(f, "\n\n\n");
fprintf(f, "Number desired Open-Loop Prediction \n");
fprintf(f, "\n");
for (Year=EVAL_LWB; Year<=EVAL_UPB; Year++) {
SimulateNet(Net, &(sample[Year][0]), Output, &(sample[Year][5]), FALSE);
fprintf(f, "%d %0.7f %0.7f\n",
FIRST_YEAR + Year,
sample[Year][5],
Output [0]);
}
}
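/* A minimal driver sketch showing how the pieces above fit together with
   stopped training (train until the test error starts rising, then roll
   back to the best saved weights). This is illustrative only: main() is
   not part of this file, the 1.2 degradation factor is an assumption, and
   it assumes TrainError/TestError are accessible members of NetWork.

int main()
{
    NetWork bpn;
    NET Net;
    BOOL Stop = FALSE;
    double MinTestError = 1e30;

    bpn.InitializeRandoms();
    bpn.GenerateNetwork(&Net);
    bpn.RandomWeights(&Net);
    bpn.InitializeApplication(&Net);
    bpn.ReadRecord();
    do {
        bpn.TrainNet(&Net);                    // one pass over the training samples
        bpn.TestNet(&Net);                     // refresh TrainError / TestError
        if (bpn.TestError < MinTestError) {
            MinTestError = bpn.TestError;      // best net so far: keep its weights
            bpn.SaveWeights(&Net);
        } else if (bpn.TestError > 1.2 * MinTestError) {
            Stop = TRUE;                       // overfitting: roll back and stop
            bpn.RestoreWeights(&Net);
        }
    } while (!Stop);
    bpn.EvaluateNet(&Net);                     // report open-loop predictions
    bpn.FinalizeApplication(&Net);
    return 0;
}
*/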