⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 加了注释bp_back.cpp

📁 本程序为一个误差向后传播的三层前馈神经网络有指导的学习算法。
💻 CPP
字号:

#include "bp_back.h"



int main(int argc, char* argv[])
{                                 //····························
	float	eta = 0.15,
			alpha = 0.075;
	int		nReportErrors = 100;	/*error reporting frequency*/	  
	float	ErrorLevel = float(ERRORLEVEL);/*satisfactory error level */
	char	MonitorError = 0;		/*true when monitor error display*/
	float	error;					/*latest sum squared error value*/
	int		h,i,j;					/*index hidden,input,output layer*/
	int		p,                    /* index pattern number              */
			q,                    /* index iteration number            */
			r,                    /* index run number                  */
			nPatterns,            /* number of patterns desired        */
			nInputNodes,          /* number of input nodes             */
			nHiddenNodes,         /* number of hidden nodes            */
			nOutputNodes,         /* number of output nodes            */
			nIterations,          /* number of iterations desired      */
			nRuns;                /* number of runs (or input lines)   */
	FILE	*fpRun,               /* run file                          */
			*fpPattern,           /* source pattern input file         */
			*fpWeights,           /* initial weight file               */
			*fpWeightsOut,        /* final weight output file          */
			*fpResults,           /* results output file               */
			*fpError;             /* error output file                 */
	char  szResults[66];          /* various filenames (pathnames)     */
	char  szError[66];
	char  szPattern[66];
	char  szWeights[66];
	char  szWeightsOut[66];
	char  *progname  =  *argv;  /* name of executable DOS 3.x only  */
    char  farg[200] ;

	/* read optional - arguments */
	/*
	for (; argc > 1;  argc--)
	{
		char *arg = *++argv;

		if (*arg  !=  '-')
			break;

		switch (*++arg)
		{
			case 'e':   sscanf(++arg,  "%d",  &nReportErrors);   break;
			case 'd':   sscanf(++arg,  "%f",  &ErrorLevel);      break;
			default:    break;
		}
	}

	if (argc < 2)
	{
		fprintf(stderr, "Usage:  %s {-en -df} runfilename\n",  progname);
		fprintf(stderr, "   -en   =>  report error every n iterations\n");
		fprintf(stderr, "   -df   =>  done if mean squared error < f\n");
		exit(1);
	}
	*/

	printf("input file name:\n") ;
	scanf("%s",farg) ;
	/* Open run file for reading */
	//if ((fpRun = fopen(*argv, "r"))   ==   NULL)
	if ((fpRun = fopen(farg, "r"))   ==   NULL)
	{
		fprintf(stderr, "%s: can't open file %s\n", progname, *argv);
		exit(1);
	}


	/* Read first line: no. of runs (lines to read from run file) */
	fscanf(fpRun,  "%d",  &nRuns);




	/*--------------------- beginning of work loop -------------------------*/
	for (r = 0;   r < nRuns;   r++)
	{
		/* read and parse the run specification line; */
		fscanf(fpRun,
			"%s %s %s %s %s %d %d %d %d %d %f %f",
			szResults,          /* output results file */
			szError,            /* error output file */
			szPattern,          /* pattern input file */
			szWeights,          /* initial weights file */
			szWeightsOut,       /* final weights output file */
			&nPatterns,         /* number of patterns to learn */
			&nIterations,       /* number of iterations through the data */
			&nInputNodes,       /* number of input nodes  */
			&nHiddenNodes,      /* number of hidden nodes */
			&nOutputNodes,      /* number of output nodes */
			&eta,               /* learning rate */
			&alpha);            /* momentum factor */


		/*----------allocate dynamic storage for all data ---------------*/
		MatrixAllocate(&out0,      nPatterns,    nInputNodes);
		MatrixAllocate(&out1,      nPatterns,    nHiddenNodes);
		MatrixAllocate(&out2,      nPatterns,    nOutputNodes);
		MatrixAllocate(&delta2,    nPatterns,    nOutputNodes);
		MatrixAllocate(&delw2,     nOutputNodes, nHiddenNodes + 1);
		MatrixAllocate(&w2,        nOutputNodes, nHiddenNodes + 1);
		MatrixAllocate(&delta1,    nPatterns,    nHiddenNodes);
		MatrixAllocate(&delw1,     nHiddenNodes, nInputNodes + 1);
		MatrixAllocate(&w1,        nHiddenNodes, nInputNodes + 1);
		MatrixAllocate(&target,    nPatterns,    nOutputNodes);
		VectorAllocate(&PatternID, nPatterns);
      
		/*--------- 读入初始权重: -------------------*/
		if ((fpWeights = fopen(szWeights,"r"))  ==  NULL)
		{
			fprintf(stderr,  "%s: can't open file %s\n",  progname, szWeights);
			exit(1);
		}


		/* read input:hidden weights */
		for (h = 0;  h < nHiddenNodes;  h++)
			for (i = 0;  i <= nInputNodes;  i++)
		{
				fscanf(fpWeights,  "%f",      &w1[h][i]);
				delw1[h][i] = 0.0;
		}


		/* read hidden:out weights */
		for (j = 0;  j < nOutputNodes;  j++)
			for (h = 0;  h <= nHiddenNodes;  h++)
		{
				fscanf(fpWeights,  "%f",      &w2[j][h]);
				delw2[j][h] = 0.0;
		}


		fclose(fpWeights);

		/*------------ Read in all patterns to be learned:----------------*/
		if ((fpPattern = fopen(szPattern, "r"))  ==  NULL)
		{
			fprintf(stderr,  "%s: can't open file %s\n",  progname, szPattern);
			exit(1);
		}

		for (p = 0;  p < nPatterns;  p++)
		{
			for (i = 0;   i < nInputNodes;   i++)
				if (fscanf(fpPattern,  "%f", &out0[p][i])  != 1 )
					goto  ALLPATTERNSREAD;


			/* read in target outputs for input patterns read */
			for (j = 0;  j < nOutputNodes;  j++)
				fscanf(fpPattern,  "%f",   &target[p][j]);

			/* read in identifier for each pattern */
			fscanf(fpPattern,  "%f ",   &PatternID[p]);
		}

		ALLPATTERNSREAD:
		fclose(fpPattern);

		if (p < nPatterns)
		{
			fprintf(stderr, "%s:  %d out of %d patterns read\n",
                 progname,  p,  nPatterns);
			nPatterns = p;
		}

		/* open error output file */
		if ((fpError = fopen(szError, "w"))  ==  NULL)
		{
			fprintf(stderr,  "%s: can't open file %s\n",  progname, szError);
			exit(1);
		}

      fprintf(stderr,  nIterations > 1  ?  "Training...\n"  :  "Testing\n");






		/*--------------------- begin iteration loop ------------------------*/
		for (q = 0;  q < nIterations;  q++)
		{
			for (p = 0;  p < nPatterns;  p++)
			{
				/*-------------------- hidden layer --------------------------*/
				/* Sum input to hidden layer over all input-weight combinations */
				for (h = 0;  h < nHiddenNodes;  h++)
				{
					float sum = w1[h][nInputNodes];  /* begin with bias  */

					for (i = 0;  i < nInputNodes;  i++)
						sum   +=   w1[h][i]  *  out0[p][i];

					/* Compute output (use sigmoid) */
					out1[p][h]   =   1.0  /  (1.0  +  exp(-sum));
				}

				/*-------------------- output layer --------------------------*/
				for (j = 0;  j < nOutputNodes;  j++)
				{
					float  sum = w2[j][nHiddenNodes];

					for (h = 0;  h < nHiddenNodes;  h++)
						sum  +=   w2[j][h]  *  out1[p][h];

					out2[p][j]  =  1.0  /  (1.0  +  exp(-sum));
				}

				/*-------------------- delta output --------------------------*/
				/* Compute deltas for each output unit for a given pattern */
				for (j = 0;  j < nOutputNodes;  j++)
					delta2[p][j] = (target[p][j] - out2[p][j]) * out2[p][j] * (1.0 - out2[p][j]);

				/*-------------------- delta hidden --------------------------*/
            
				for (h = 0;  h < nHiddenNodes;  h++)
				{
					float  sum = 0.0;

					for (j = 0;  j < nOutputNodes;  j++)
						sum  +=  delta2[p][j] * w2[j][h];//注意此处delta1的求法
					delta1[p][h]  =  sum  *  out1[p][h]  *  (1.0 - out1[p][h]);
				}
			}		/*end of for p*/

			/*-------------- adapt weights hidden:output ---------------------*/
			for (j = 0;  j < nOutputNodes;  j++)
			{
				float  dw;                  /* delta weight */
				float  sum = 0.0;

				/* grand sum of deltas for each output node for one epoch */
				for (p = 0;  p < nPatterns;  p++)
					sum  +=  delta2[p][j];

				/* Calculate new bias weight for each output unit */
				dw   =   eta * sum  +  alpha * delw2[j][nHiddenNodes];/*delw2[j][nHiddenNodes]是存储阈值使它改变*/
				w2[j][nHiddenNodes]   +=   dw;
				delw2[j][nHiddenNodes] =   dw;     /* delta for bias */

				/* Calculate new weights */
				for (h = 0;  h < nHiddenNodes;  h++)
				{
					float  sum = 0.0;

					for (p = 0;  p < nPatterns;  p++)
						sum  +=  delta2[p][j] * out1[p][h];//对应w2[j][h],一个输出节点和所有的隐含节点jh,资料有错

					dw = eta * sum  +  alpha * delw2[j][h];//delw2[j][h]是存储权值的
					w2[j][h] += dw;
					delw2[j][h]  =   dw;
				}
			}   /*end of  for adapt weights hidden:output*/

			/*-------------------- adapt weights input:hidden -----------------*/
			for (h = 0;  h < nHiddenNodes;  h++)
			{
				float  dw;                  /* delta weight */
				float  sum = 0.0;

				for (p = 0;  p < nPatterns;  p++)
					sum  +=  delta1[p][h];

				/* Calculate new bias weight for each hidden unit */
				dw   =   eta * sum  +  alpha * delw1[h][nInputNodes];/*delw1[h][nInputNodes]存储阈值使它改变*/
				w1[h][nInputNodes]   +=   dw;
				delw1[h][nInputNodes] =   dw;

				/* Calculate new weights */
				for (i = 0;  i < nInputNodes;  i++)
				{
					float  sum = 0.0;

					for (p = 0;  p < nPatterns;  p++)
						sum  +=  delta1[p][h] * out0[p][i];//对应w1[h][i]

					dw = eta * sum  +  alpha * delw1[h][i];
					w1[h][i]     +=  dw;
					delw1[h][i]  =   dw;
				}
			}    /*end of for  adapt weights input:hidden */


			
			/* -------------- monitor keyboard requests ---------------------*/
			if (kbhit())
			{
				int    c = getch();

				if ((c = toupper(c))  == 'E')
					MonitorError++;
				else if (c == ESC)
					break;              /* Terminate gracefully on quit key */
			}


			/*-------------------- Sum Squared Error ------------------------*/
			if (MonitorError  ||  (q % nReportErrors   ==   0))
			{
				for (p = 0, error = 0.0;   p < nPatterns;   p++)
				{
					for (j = 0;  j < nOutputNodes;  j++)
					{
						float  temp   =   target[p][j] - out2[p][j];

						error += temp * temp;
					}
				}

				/* Average error per node over all patterns */
				error  /=  (nPatterns * nOutputNodes);

				/* Print iteration number and  error value */
				fprintf(stderr,"Iteration %5d/%-5d  Error %f\r", q, nIterations, error); 
				/* to console */
				MonitorError = 0;

				if (q % nReportErrors   ==   0)
					fprintf(fpError, "%d  %f\n",  q,  error);  /* to file */

				/* Terminate when error satisfactory */
				if (error < ErrorLevel)                                 /*循环结束的条件*/
					break;//跳出iteration
			}
	   }  
		/*-----end of iteration loop -----*/








		for (p = 0, error = 0.0;  p < nPatterns;  p++)
		{
			for (j = 0;  j < nOutputNodes;  j++)
			{
				float  temp   =   target[p][j] - out2[p][j];

				error += temp * temp;
			}
		}

		/* Average error over all patterns */
		error  /=  (nPatterns * nOutputNodes);

		/* Print final iteration number and error value */
		fprintf(stderr, "Iteration %6d/%-6d  Error %f\n", q, nIterations, error); /* to console */
		fprintf(fpError, "\n%d  %f\n",  q,  error);        /* to file */
		fclose(fpError);

		/*---------------- print final weights -------------------------------*/
		if ((fpWeightsOut = fopen(szWeightsOut,"w"))  ==  NULL)
		{
			fprintf(stderr,  "%s: can't write file %s\n",  progname, szWeightsOut);
			exit(1);
		}

		for (h = 0;  h < nHiddenNodes;  h++)
			for (i = 0;  i <= nInputNodes;  i++)
				fprintf(fpWeightsOut,  "%g%c", w1[h][i], i%ITEMS==ITEMS-1 ? '\n':' ');

		for (j = 0;  j < nOutputNodes;  j++)
			for (h = 0;  h <= nHiddenNodes;  h++)
				fprintf(fpWeightsOut,  "%g%c", w2[j][h], j%ITEMS==ITEMS-1 ? '\n':' ');

		fclose(fpWeightsOut);


		/*----------------- Print final activation values-------------------- */
		if ((fpResults = fopen(szResults,"w"))  ==  NULL)
		{
			fprintf(stderr,  "%s: can't write file %s\n",  progname, szResults);
			fpResults = stderr;
		}

		/* Print final output vector */
		for (p = 0;  p < nPatterns;  p++)
		{
			fprintf(fpResults, "%d   ",  p);

			for (j = 0;  j < nOutputNodes;  j++)
				fprintf(fpResults, " %f",  out2[p][j]);

			fprintf(fpResults, "  %-6.0f\n", PatternID[p]);
		}

		fclose(fpResults);

		/*---------------- free dynamic storage for data ---------------------*/
		MatrixFree(out0,      nPatterns);
		MatrixFree(out1,      nPatterns);
		MatrixFree(delta1,    nPatterns);
		MatrixFree(delw1,     nHiddenNodes);
		MatrixFree(w1,        nHiddenNodes);
		MatrixFree(out2,      nPatterns);
		MatrixFree(delta2,    nPatterns);
		MatrixFree(delw2,     nOutputNodes);
		MatrixFree(w2,        nOutputNodes);
		MatrixFree(target,    nPatterns);
		free(PatternID);
	}//此处对应nRun

	fclose(fpRun);                     /* close run file */

	return 0;
}//··········································




/*----------------- Array storage allocation routines ---------------------*/
/* Allocate space for vector of float cells for
   one dimensional dynamic vector[cols]
*/
/* Allocate a one-dimensional dynamic vector of nCols floats,
   zero-initialized by calloc; aborts the program if memory runs out. */
void VectorAllocate(VECTOR *vector, int nCols)
{
   *vector = (VECTOR) calloc(nCols, sizeof(float));
   if (*vector == NULL)
   {
      fprintf(stderr, "Sorry! Not enough memory for nodes\n");
      exit(1);
   }
}



/* Allocate space for columns (float cells) for
   dynamic two dimensional matrix[rows][cols]
*/
/* Allocate the column storage (nCols float cells) for every one of the
   nRows rows of a dynamic two-dimensional matrix[rows][cols]. */
void AllocateCols(PFLOAT matrix[], int nRows, int nCols)
{
   int  row;

   for (row = 0;  row < nRows;  row++)
   {
      VectorAllocate(&matrix[row], nCols);
   }
}



/* Allocate space for a two dimensional dynamic matrix [rows] [cols]
*/

/* Allocate a two-dimensional dynamic matrix[nRows][nCols]: first the
   row-pointer array, then the column storage for each row.  Aborts
   with a message when memory is exhausted. */
void MatrixAllocate(MATRIX *pmatrix, int nRows, int nCols)
{
   *pmatrix = (MATRIX) calloc(nRows, sizeof(PFLOAT));
   if (*pmatrix == NULL)
   {
      fprintf(stderr, "Sorry! Not enough memory for nodes\n");
      exit(1);
   }

   AllocateCols(*pmatrix, nRows, nCols);
}



/* free space for two dimensional dynamic array */
/* Release a two-dimensional dynamic matrix: free each row's column
   storage first, then the row-pointer array itself. */
void MatrixFree(MATRIX matrix, int nRows)
{
   int  row = 0;

   while (row < nRows)
      free(matrix[row++]);
   free(matrix);
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -