/* bp source program (batchnet.c) — page header from scraped listing */
/* batchnet.c
Generic back-propagation neural network*/
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <conio.h>
#include <ctype.h>
#include <string.h>
#include "time.h"
#include "stddef.h"
#define ESC 27               /* keyboard code used to terminate training */
#define ERRORLEVEL 0.000000001 /* default "good enough" sum-squared error */
#define ITEMS 8              /* NOTE(review): unused in the visible code — confirm before removing */
double C1=1.2;               /* NOTE(review): unused in the visible code (adaptive-rate factors?) */
double C2=0.8;
double lasterror=2.0;        /* previous epoch error; 2.0 = sentinel "worse than any real error" */
double ERROR_RATE;           /* NOTE(review): unused in the visible code */
/* BUG FIX: was 'double e;' but main() reads it with scanf("%f",&e) —
   "%f" requires a float*, so reading into a double is undefined
   behavior. Declared float so the existing "%f" conversion is correct. */
float e;                     /* user-entered error control rate (0.001-0.1) */
/* typedefs and prototypes for dynamic storage of arrays */
/* A VECTOR is a 1-D float array; a MATRIX is an array of row pointers
   (ragged 2-D float array), both heap-allocated by the helpers below
   (definitions not in this listing). */
typedef float *PFLOAT;
typedef PFLOAT VECTOR;
typedef PFLOAT *MATRIX;
void VectorAllocate(VECTOR *vector,int nCols);        /* allocate a 1-D float array */
void AllocateCols(PFLOAT matrix[],int nRows,int nCols); /* allocate each row of a matrix */
void MatrixAllocate(MATRIX *pmatrix,int nRows,int nCols); /* allocate rows + columns */
void MatrixFree(MATRIX matrix,int nRows);             /* release rows, then the row array */
void DisplayNet(MATRIX matrix);
/* define storage for net layers */
/* Arrays for inputs,outputs,deltas,weights & targets.
   Layer outputs and deltas are [pattern][node]; weight matrices are
   [to-node][from-node], with one extra column holding the bias weight. */
MATRIX out0; /* input layer */
MATRIX out1; /* hidden layer */
MATRIX delta1; /* delta at hidden layer */
MATRIX delw1; /* change in weights input:hidden */
MATRIX w1; /* weights input:hidden */
MATRIX out2; /* output layer */
MATRIX delta2; /* delta at output layer */
MATRIX delw2; /* change in weight hidden:output */
MATRIX w2; /* weights hidden:output */
MATRIX target; /* target output */
VECTOR PatternID; /* identifier for each stored pattern */
void main(int argc,char *argv[])
{ /* 1 */
time_t start,finish;
float eta=0.1, /* default learning rate */
alpha=0.02; /* default momentum factor */
int nReportErrors=50; /*error reporting frequency */
float ErrorLevel=ERRORLEVEL; /* satisfactory error level */
char MonitorError=0; /* true sum squared error value */
float error; /* latest sum squared error value */
register int h; /* index hidden layer */
register int i; /* index input layer */
register int j; /* index output layer */
int p, /* index pattern number */
q, /* index iterations desired */
r, /* index run number */
nPatterns, /* number of patterns desired */
nInputNodes, /* number of input nodes */
nHiddenNodes, /* number of hidden nodes */
nOutputNodes, /* number of output nodes */
nIterations, /* number of iteration nodes */
nRuns; /* number of runs */
FILE *fpRun, /* run file */
*fpPattern, /* source pattern input file */
*fpWeights, /* initial weight file */
*fpWeightsOut, /* final weight output file */
*fpResults, /* results output file */
*fpError; /* error output file */
char szResults[66]; /* various filenames(pathnames) */
char szError[66];
char szPattern[66];
char szWeights[66];
char szWeightsOut[66];
char *progname=*argv; /*name of executable DOS 3.x only */
/* read optional-arguments */
start=time(NULL);
for (;argc>1;argc--)
{ /* 2 */
char *arg=*++argv;
if (*arg!='-')
break;
switch(*++arg)
{ /* 3 */
case 'e':sscanf(++arg,"%d",&nReportErrors);break;
case 'd':sscanf(++arg,"%f",&ErrorLevel);break;
default:break;
} /* 3 */
} /* 2 */
if (argc<2)
{ /* 2 */
fprintf(stderr,"\n\tUsage: %s [-en -df] runfilename\n",progname);
fprintf(stderr,"\t -en =>report error every n iterations\n");
fprintf(stderr,"\t -df =>done if mean spuared error <f\n");
exit(1);
} /* 2 */
printf("Please input error contral rate:(0.001-0.1)\n");
scanf("%f",&e);
/* open run file for reading */
if ((fpRun=fopen(*argv,"r"))==NULL)
{ /* 2 */
fprintf(stderr,"%s:can't open file %s\n",progname,*argv);
exit(1);
} /* 2 */
/* Read first line :no. of runs (lines to read from run file) */
fscanf(fpRun,"%d",&nRuns);
/*--------------------beginning of work loop------------------*/
for (r=0;r<nRuns;r++)
{ /* 2 */
/* read and parse the run specification line; */
fscanf(fpRun,"%s%s%s%s%s%d%d%d%d%d%f%f",
szResults, /* output results file */
szError, /* error output file */
szPattern, /* pattern input file */
szWeights, /* initial weights file */
szWeightsOut, /* final weights output file */
&nPatterns, /* number of patterns to learn */
&nIterations, /* number of iterations through the date */
&nInputNodes, /* number of input nodes */
&nHiddenNodes, /* number of hidden nodes */
&nOutputNodes, /* number of output nodes */
&eta, /* learning rate */
&alpha); /* momentum factor */
/*--------------allocate dynamic storage for all data-------*/
MatrixAllocate(&out0, nPatterns, nInputNodes);
MatrixAllocate(&out1, nPatterns, nHiddenNodes);
MatrixAllocate(&out2, nPatterns, nOutputNodes);
MatrixAllocate(&delta2, nPatterns, nOutputNodes);
MatrixAllocate(&delw2, nOutputNodes, nHiddenNodes+1);
MatrixAllocate(&w2, nOutputNodes, nHiddenNodes+1);
MatrixAllocate(&delta1, nPatterns, nHiddenNodes);
MatrixAllocate(&delw1, nHiddenNodes, nInputNodes+1);
MatrixAllocate(&w1, nHiddenNodes, nInputNodes+1);
MatrixAllocate(&target, nPatterns, nOutputNodes);
VectorAllocate(&PatternID, nPatterns);
/*--------Read the initial weight matrices;----------*/
if ((fpWeights=fopen(szWeights,"r"))==NULL)
{ /* 3 */
fprintf(stderr,"%s:can't open file %s\n",progname,szWeights);
exit(1);
} /* 3 */
/* read input => hidden weights */
for (h=0;h<nHiddenNodes;h++)
for (i=0;i<=nInputNodes;i++)
{ /* 3 */
fscanf(fpWeights,"%f",&w1[h][i]);
delw1[h][i]=0.0;
} /* 3 */
/* read hidden => out weights */
for (j=0;j<nOutputNodes;j++)
for (h=0;h<=nHiddenNodes;h++)
{ /* 3 */
fscanf(fpWeights,"%f",&w2[j][h]);
delw2[j][h]=0.0;
} /* 3 */
fclose(fpWeights);
/*-------------Read in all patterns to be learned;--------*/
if((fpPattern=fopen(szPattern,"r"))==NULL)
{ /* 3 */
fprintf(stderr,"%s:can't open file %s\n",progname,szPattern);
exit(1);
} /* 3 */
for(p=0;p<nPatterns;p++)
{ /* 3 */
for(i=0;i<nInputNodes;i++)
if(fscanf(fpPattern,"%f",&out0[p][i])!=1)
goto ALLPATTERNSREAD;
/* read in target outputs for each pattern */
for (j=0;j<nOutputNodes;j++)
fscanf(fpPattern,"%f",&target[p][j]);
/* read in identifier for each pattern */
fscanf(fpPattern,"%f",&PatternID[p]);
} /* 3 */
ALLPATTERNSREAD:
fclose(fpPattern);
if (p<nPatterns)
{ /* 3 */
fprintf(stderr,"%s:%d out of %d patterns read\n",
progname,p,nPatterns);
nPatterns=p;
} /* 3 */
/* open error output file */
if ((fpError=fopen(szError,"w"))==NULL)
{ /* 3 */
fprintf(stderr,"%s:can't open file %s\n",progname,szError);
exit(1);
} /* 3 */
//clrscr();
fprintf(stderr,"\n\n\n\t\t Back-Propagation Neural Networks\n\n");
fprintf(stderr,nIterations>1?"\n\n\n\t\tBegin training,Press'ESC'to terminate...\n\n":"\n\tTesting\n\n");
/*---------begin iteration loop--------*/
for (q=0;q<nIterations;q++)
{ /* 3 */
/* Forward pass and error back-propagation for every pattern in the
   training set.  Deltas are stored per pattern; the weight updates
   below accumulate them over the whole epoch (batch learning). */
for (p=0;p<nPatterns;p++)
{ /* 4 */
/*-------hidden layer-------*/
/* Sum input to hidden layer over all
input-weight combinations */
for (h=0;h<nHiddenNodes;h++)
{ /* 5 */
float sum=w1[h][nInputNodes]; /*begin with bias */
for (i=0;i<nInputNodes;i++)
sum+=w1[h][i]*out0[p][i];
/* compute output(use sigmoid) */
out1[p][h]=1.0/(1.0+exp(-sum));
} /* 5 */
/*------------output layer----------------*/
for (j=0;j<nOutputNodes;j++)
{ /* 5 */
float sum=w2[j][nHiddenNodes]; /* bias weight in extra column */
for (h=0;h<nHiddenNodes;h++)
sum+=w2[j][h]*out1[p][h];
out2[p][j]=1.0/(1.0+exp(-sum));
} /* 5 */
/*-----------delta output------------*/
/* Compute deltas for each output unit for a given pattern:
   (target - output) times the sigmoid derivative out*(1-out). */
for(j=0;j<nOutputNodes;j++)
delta2[p][j]=(target[p][j]-out2[p][j])*
out2[p][j]*(1.0-out2[p][j]);
/*----------delta hidden---------------*/
/* Back-propagate: each hidden delta is the weighted sum of the
   output deltas it feeds, times the hidden unit's sigmoid derivative. */
for(h=0;h<nHiddenNodes;h++)
{
/* 5 */
float sum=0.0;
for(j=0;j<nOutputNodes;j++)
sum+=delta2[p][j]*w2[j][h];
delta1[p][h]=sum*out1[p][h]*(1.0-out1[p][h]);
}
/* 5 */
} /* 4 */
/*---------adapt weights hidden:output--------------*/
/* Batch update: deltas are summed over all patterns for the epoch,
   then each weight moves by eta*gradient plus a momentum term
   (alpha times the previous epoch's change). */
for (j=0;j<nOutputNodes;j++)
{ /* 4 */
float dw; /* delta weight */
float sum=0.0;
/* grand sum of deltas for each output node for one epoch */
for (p=0;p<nPatterns;p++)
sum+=delta2[p][j];
/* Calculate new bias weight for each output unit
   (bias lives in the extra column nHiddenNodes) */
dw=eta*sum+alpha*delw2[j][nHiddenNodes];
w2[j][nHiddenNodes]+=dw;
delw2[j][nHiddenNodes]=dw; /* delta for bias */
/* Calculate new weights */
for (h=0;h<nHiddenNodes;h++)
{ /* 5 */
float sum=0.0;
for (p=0;p<nPatterns;p++)
sum+=delta2[p][j]*out1[p][h];
dw=eta*sum+alpha*delw2[j][h]; /* gradient step + momentum */
w2[j][h]+=dw;
delw2[j][h]=dw; /* remember change for next epoch's momentum */
} /* 5 */
} /* 4 */
/*---------adapt weights input:hidden----------*/
/* Batch update of the input:hidden weights, mirroring the
   hidden:output update above: eta*gradient plus momentum. */
for(h=0;h<nHiddenNodes;h++)
{ /* 4 */
float dw; /* delta weight */
float sum=0.0;
for (p=0;p<nPatterns;p++)
sum+=delta1[p][h];
/* Calculate new bias weight for each hidden unit
   (bias lives in the extra column nInputNodes) */
dw=eta*sum+alpha*delw1[h][nInputNodes];
w1[h][nInputNodes]+=dw;
delw1[h][nInputNodes]=dw;
/* Calculate new weights */
for (i=0;i<nInputNodes;i++)
{ /* 5 */
float sum=0.0;
for (p=0;p<nPatterns;p++)
sum+=delta1[p][h]*out0[p][i];
/* BUG FIX: the momentum term must use this weight's own previous
   change, delw1[h][i].  The original read delw1[h][j], indexing
   with the stale output-node index j (j==nOutputNodes after the
   delta2 loop), which is the wrong gradient history and can read
   past the nInputNodes+1 columns allocated for delw1. */
dw=eta*sum+alpha*delw1[h][i];
w1[h][i]+=dw;
delw1[h][i]=dw;
} /* 5 */
} /* 4 */
/* ---------monitor keyboard requests-----------*/
if (kbhit())
{ /* 4 */
int c=getch();
if((c=toupper(c))=='E')
MonitorError++;
else if (c==ESC)
exit(0); /*Terminate
   training on ESC keypress */
/* NOTE(review): the source listing is truncated here — the remainder of
   main() (epoch error computation and reporting, final weight/result
   output, storage cleanup) and the definitions of VectorAllocate,
   AllocateCols, MatrixAllocate, MatrixFree and DisplayNet are missing
   from this capture.  (Scraped page UI text removed.) */