bp source program.txt
gracefully on quit key */
} /* 4 */
/*---------sum Squared Error------------------*/
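/* Note added for clarity: the quantity reported below is the mean squared
error over all patterns and output nodes,
error = (1/(nPatterns*nOutputNodes)) * SUM_p SUM_j (target[p][j]-out2[p][j])^2 */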
if(MonitorError ||(q%nReportErrors==0))
{ /* 4 */
for (p=0,error=0.0;p<nPatterns;p++)
{ /* 5 */
for (j=0;j<nOutputNodes;j++)
{ /* 6 */
float temp=target[p][j]-out2[p][j];
error+=temp*temp;
} /* 6 */
} /* 5 */
/* Average error per node over all patterns */
error/=(nPatterns*nOutputNodes);
/* print iteration number and error value */
finish=time(NULL);
fprintf(stderr,"\t\tIteration %7d/%-7d Error %f\tTime:%4.0f\r",
q,nIterations,error,difftime(finish,start)); /* to console */
MonitorError=0;
if (q%nReportErrors==0)
fprintf(fpError,"%d %f\n",q,error); /* to file */
ERROR_RATE=(lasterror-error)/lasterror;
if(ERROR_RATE>0&&ERROR_RATE<e)
eta=eta*C1;
else if(ERROR_RATE<0)
eta=eta*C2;
/* otherwise (error unchanged) leave eta as it is */
lasterror=error;
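/* Note added for clarity: the if/else chain above adapts the learning rate
eta from the relative change in error between reports. ERROR_RATE>0 but
below the threshold e (the error is still falling, only slowly) scales eta
by C1; ERROR_RATE<0 (the error grew) scales eta by C2. C1, C2 and e are
defined earlier in the program and are not shown in this excerpt. */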
/* Terminate when error satisfactory */
if (error<ErrorLevel)
break;
} /* 4 */
} /* 3 */
/*----------end of iteration loop---------------*/
for(p=0,error=0.0;p<nPatterns;p++)
{ /* 3 */
for (j=0;j<nOutputNodes;j++)
{ /* 4 */
float temp=target[p][j]-out2[p][j];
error+=temp*temp;
} /* 4 */
} /* 3 */
/* Average error over all patterns */
error/=(nPatterns*nOutputNodes);
/* print final iteration number and error value */
fprintf(stderr,"\t\tIteration %7d/%-7d\tError %f\n\n",q,nIterations,error); /* to console */
fprintf(fpError,"\n\t%d %f\n",q,error); /* to file */
finish=time(NULL);
fprintf(fpError,"\n\tElapsed time:%4.0f\n",difftime(finish,start));
fprintf(stderr,"\n\t\tElapsed time:%4.0f\n\n",difftime(finish,start));
fclose(fpError);
/*------------print final weights--------------*/
if ((fpWeightsOut=fopen(szWeightsOut,"w"))==NULL)
{ /* 3 */
fprintf(stderr,"%s:can't write file %s\n",progname,szWeightsOut);
exit(1);
} /* 3 */
fprintf(fpWeightsOut,"\n\n-----The final weights after learning-----\n\n");
fprintf(fpWeightsOut,"\n alpha:%6.2f\tbeta:%6.2f\n",eta,alpha);
fprintf(fpWeightsOut," weights between input and hidden layers\n\n" );
for (h=0;h<nHiddenNodes;h++)
{ /* 3 */
for(i=0;i<nInputNodes;i++)
fprintf(fpWeightsOut,"%2d => %-2d: %6.4f%c",i+1,h+1,w1[h][i],(i%(ITEMS/4)==(ITEMS/4)-1)?'\n':'\t');
fprintf(fpWeightsOut,"\nHidden%2d bias: %6.4f\n",h+1,w1[h][nInputNodes]);
} /* 3 */
fprintf(fpWeightsOut,"\n\n weights between hidden and output layers\n\n");
for(j=0;j<nOutputNodes;j++)
{ /* 3 */
for(h=0;h<nHiddenNodes;h++)
fprintf(fpWeightsOut,"%2d => %-2d: %6.4f%c",h+1,j+1,w2[j][h],(h%(ITEMS/4)==(ITEMS/4)-1)?'\n':'\t');
fprintf(fpWeightsOut,"\nOut %2d bias: %6.4f\n",j+1,w2[j][nHiddenNodes]);
} /* 3 */
fclose(fpWeightsOut);
/* rewrite initial weight file*/
if ((fpWeights=fopen(szWeights,"w"))==NULL)
{ /* 3 */
fprintf(stderr,"%s:can't write file %s\n",progname,szWeights);
exit(1);
} /* 3 */
for (h=0;h<nHiddenNodes;h++)
{ /* 3 */
for(i=0;i<=nInputNodes;i++)
fprintf(fpWeights,"%6.4f\n",w1[h][i]); /* write to fpWeights; fpWeightsOut was already closed above */
//fprintf(fpWeightsOut,"\nHidden%2d bias: %6.4f\n",h+1,w1[h][nInputNodes]);
} /* 3 */
//fprintf(fpWeightsOut,"\n\n weights between hidden and output layers\n\n");
for(j=0;j<nOutputNodes;j++)
{ /* 3 */
for(h=0;h<=nHiddenNodes;h++)
fprintf(fpWeights,"%6.4f\n",w2[j][h]); /* again write to fpWeights, not the closed fpWeightsOut */
//fprintf(fpWeightsOut,"\nOut %2d bias: %6.4f\n",j+1,w2[j][nHiddenNodes]);
} /* 3 */
fclose(fpWeights);
/*---------print final activation values------------*/
if((fpResults=fopen(szResults,"w"))==NULL)
{ /* 3 */
fprintf(stderr,"%s:can't write file %s\n",progname,szResults);
fpResults=stderr;
} /* 3 */
/* print final output vector */
fprintf(fpResults,"\nalpha:%6.2f\tbeta:%6.2f\n\n",eta,alpha);
fprintf(fpResults,"PatternNO. \n RealOutput TargetOutput ");
fprintf(fpResults," PatternID Error\n");
for(p=0;p<nPatterns;p++)
{ /* 3 */
fprintf(fpResults," %d \n",p+1);
for(j=0;j<nOutputNodes;j++)
{ fprintf(fpResults," %f \t%f\t",out2[p][j],target[p][j]);
fprintf(fpResults,"%-6.0f\t",PatternID[p]);
fprintf(fpResults,"%f\n",out2[p][j]-target[p][j]);
}
} /* 3 */
fclose(fpResults);
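/* ----- interactive query (note added for clarity) -----
The code below reads one input vector from the keyboard and runs a single
forward pass through the trained weights, using the same logistic activation
1/(1+exp(-sum)) as the training loop. Row 1 of out0/out1/out2 is simply
reused as scratch storage for this single pattern. */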
printf("please input value:\n");
for(i=0;i<nInputNodes;i++)
scanf("%f",&out0[1][i]);
for(h=0;h<nHiddenNodes;h++)
{
float sum=w1[h][nInputNodes];
for(i=0;i<nInputNodes;i++)
sum+=w1[h][i]*out0[1][i];
out1[1][h]=1.0/(1.0+exp(-sum));
}
for(j=0;j<nOutputNodes;j++)
{
float sum=w2[j][nHiddenNodes];
for(h=0;h<nHiddenNodes;h++)
sum+=w2[j][h]*out1[1][h];
out2[1][j]=1.0/(1.0+exp(-sum));
}
for(j=0;j<nOutputNodes;j++)
fprintf(stderr,"The result is:\n\t%f\n",out2[1][j]);
/*----------free dynamic storage for data--------------*/
MatrixFree(out0,nPatterns);
MatrixFree(out1,nPatterns);
MatrixFree(delta1,nPatterns);
MatrixFree(delw1,nHiddenNodes);
MatrixFree(w1,nHiddenNodes);
MatrixFree(out2,nPatterns);
MatrixFree(delta2,nPatterns);
MatrixFree(delw2,nPatterns); /* note: if delw2 was allocated with nOutputNodes rows (like w2), that count should be used here instead */
MatrixFree(w2,nOutputNodes);
MatrixFree(target,nPatterns);
free(PatternID);
} /* 2 */
fclose(fpRun); /* close run file */
} /* 1 */
/*---------------- the end of the main program ------------------*/
/*--------Array storage allocation routines----------*/
/* Allocate space for vector of float cells for
one dimensional dynamic vector[cols] */
void VectorAllocate(VECTOR *vector,int nCols)
{
if ((*vector=(VECTOR)calloc(nCols,sizeof(float)))==NULL)
{
fprintf(stderr,"Sorry!Not enough memory for nodes\n");
exit(1);
}
}
/* Allocate space for columns(float cells)for
dynamic two dimensional matrix[rows][cols] */
void AllocateCols(PFLOAT matrix[],int nRows,int nCols)
{ /* 1 */
int i;
for(i=0;i<nRows;i++)
VectorAllocate(&matrix[i],nCols);
} /* 1 */
/* Allocate space for a two dimensional dynamic matrix[rows][cols] */
void MatrixAllocate(MATRIX *pmatrix,int nRows,int nCols)
{ /* 1 */
if((*pmatrix=(MATRIX)calloc(nRows,sizeof(PFLOAT)))==NULL)
{ /* 2 */
fprintf(stderr,"Sorry!Not enough memory for nodes\n");
exit(1);
} /* 2 */
AllocateCols(*pmatrix,nRows,nCols);
} /* 1 */
/* free space for two dimensional dynamic array */
void MatrixFree(MATRIX matrix,int nRows)
{ /* 1 */
int i;
for (i=0;i<nRows;i++)
free(matrix[i]);
free(matrix);
} /* 1 */
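/* Usage sketch (added; not part of the original listing): how the allocation
routines above are normally paired. nRows/nCols are illustrative names.

   MATRIX m;
   MatrixAllocate(&m,nRows,nCols);   -- rows zero-initialized by calloc; use as m[r][c]
   MatrixFree(m,nRows);              -- frees each row, then the row-pointer block
*/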
/* weights.c
For use with a neural network with learnable biases, such as batchnet.
Program invocation on MS-DOS:
weights seed in hidden out [abs_max_wt]
Specifying abs_max_wt is optional; the default value is 0.3.
Output is written to the initial weights file; only a status message goes to stdout.
This program generates random weights for a neural network with the numbers of nodes specified above. */
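/* Example invocation (added; the numbers are purely illustrative):
weights 37 4 3 2 0.3
seeds rand() with 37 and writes (4+1)*3 + (3+1)*2 = 23 weights, uniform in
[-0.3,0.3], for a 4-3-2 network (the +1 terms are the learnable biases). */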
#include<stdio.h>
#include<stdlib.h>
#define MAXWEIGHT ((float)0.3) /* maximum weight */
#define SCALEWEIGHT ((float)32767) /*normalizing scale factor */
#define ITEMS 8 /*items printed per line */
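/* Note added: SCALEWEIGHT==32767 matches RAND_MAX on 16-bit DOS compilers
such as Turbo C, so rand()/SCALEWEIGHT lies in [0,1]; with a compiler whose
RAND_MAX differs, the generated weights would no longer span +-wmax. */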
void main(int argc,char *argv[])
{
float scale=SCALEWEIGHT; /*normalize weights in range +-1 */
float wmax; /* wmax is abs val of max weight */
int i,
j,
nInputNodes,
nHiddenNodes,
nOutputNodes;
FILE *fpWeights, /* initial weights file */
*fpError; /* error output file */
static char szWeights[66]="D:\\TC\\WTINI.DAT";
static char szError[66]="D:\\TC\\WTERROR.DAT";
char *progname=*argv; /* name of executable DOS 3.x only */
if (argc<5) /* specifying abs_max_wt is optional */
{
printf("Usage:weights seed in hidden out [abs_max_wt] \n");
printf("weights seed(10-100) Innum Hinum Outnum WeightsMax \n");
exit(1);
}
srand((unsigned)atoi(argv[1])); /* parameter is random seed */
nInputNodes=atoi(argv[2]); /*number of input nodes */
nHiddenNodes=atoi(argv[3]); /*number of hidden nodes */
nOutputNodes=atoi(argv[4]); /* number of output nodes */
/* set range of initial weights */
if (argc<6||(wmax=atof(argv[5]))==0) /* read argv[5] only when supplied; atof allows fractional limits such as 0.3 */
wmax=MAXWEIGHT; /* default value, if not specified */
/* set weights to random number between -wmax and wmax */
/* generate initial layer 1 weights , including a learnable bias
for each hidden unit */
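/* Worked example (added for clarity): with frand=rand() in [0,32767],
w=wmax*(1.0-2.0*frand/scale) runs linearly from +wmax at frand=0 down to
-wmax at frand=32767, giving weights spread over [-wmax,+wmax]. */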
if((fpWeights=fopen(szWeights,"w"))==NULL)
{fprintf(stderr,"%s:can't write file %s\n",progname,szWeights);
exit(1);
}
fprintf(stdout,"%s:The output weights data file is %s\n",progname,szWeights);
for (i=0;i<nHiddenNodes;i++)
{
for(j=0;j<nInputNodes+1;j++)
{
float frand=rand();
float w1=wmax*(1.0-2.0*frand/scale);
fprintf(fpWeights,"%9.6f%c",w1,
(j%ITEMS==ITEMS-1||j==nInputNodes)?'\n':' ');
}
}
/* generate layer 2 weights with bias,as above */
for (i=0;i<nOutputNodes;i++)
{
for (j=0;j<nHiddenNodes+1;j++)
{
float frand=rand();
float w2=wmax*(1.0-2.0*frand/scale);
fprintf(fpWeights,"%9.6f%c",w2,
(j%ITEMS==ITEMS-1||j==nHiddenNodes)?'\n':' ');
}
}
}
--
※ Source: 飘渺水云间 freecity.dhs.org [FROM: ytc]