
📄 bparthimeitic812一输出的.cpp

📁 This program implements a supervised-learning algorithm for a three-layer feed-forward neural network trained with error back-propagation.
💻 CPP
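For orientation, the update rules that the loops below implement can be summarized as follows. This is only a sketch, written in the notation of the code's own arrays (out0/out1/out2 are the layer activations, delta1/delta2 the local gradients, eta the learning rate, alpha the momentum coefficient):

$$o \;=\; \frac{1}{1 + e^{-net}}, \qquad net \;=\; b + \sum_i w_i\,x_i$$
$$\delta^{(2)}_{pj} \;=\; \bigl(t_{pj} - o^{(2)}_{pj}\bigr)\, o^{(2)}_{pj}\,\bigl(1 - o^{(2)}_{pj}\bigr)$$
$$\delta^{(1)}_{ph} \;=\; o^{(1)}_{ph}\,\bigl(1 - o^{(1)}_{ph}\bigr)\sum_j \delta^{(2)}_{pj}\, w^{(2)}_{jh}$$
$$\Delta w \;=\; \eta \sum_p \delta\, o \;+\; \alpha\, \Delta w_{\mathrm{prev}}, \qquad w \leftarrow w + \Delta w$$

Weights are adjusted once per pass over all 10 patterns (batch learning with momentum), and training stops when the mean squared error per output node drops below ERRORLEVEL or after nIterations epochs.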
		                 
#define nPatterns      10        /* number of training patterns        */
#define nInputNodes     2        /* number of input nodes              */
#define nHiddenNodes    3        /* number of hidden nodes             */
#define nOutputNodes    1        /* number of output nodes             */

#define ERRORLEVEL      0.0001   /* satisfactory mean squared error    */
#define nIterations     2000000  /* maximum number of training epochs  */



#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

using namespace std;

/* define storage for net layers */

double  out0[nPatterns][nInputNodes] =          /* input layer: training patterns  */
                       {{0.0000, 0.0000},
                        {0.0000, 0.5000},
                        {0.1667, 0.1667},
                        {0.1667, 0.8333},
                        {0.3333, 1.0000},
                        {0.5000, 0.3333},
                        {0.6667, 0.3333},
                        {0.6667, 0.8333},
                        {0.8333, 0.8333},
                        {1.0000, 0.1667}};
double  target[nPatterns][nOutputNodes] =       /* desired output for each pattern */
                       {{1.0000}, {1.0000}, {1.0000}, {0.5000}, {0.1667},
                        {0.6667}, {0.5000}, {0.0000}, {0.0000}, {0.3333}};
double  out1[nPatterns][nHiddenNodes];          /* hidden layer                             */
double  delta1[nPatterns][nHiddenNodes];        /* delta at hidden layer                    */
double  delw1[nHiddenNodes][nInputNodes+1];     /* change in weights input:hidden (+ bias)  */
double  w1[nHiddenNodes][nInputNodes+1];        /* weights input:hidden (+ bias)            */
double  out2[nPatterns][nOutputNodes];          /* output layer                             */
double  delta2[nPatterns][nOutputNodes];        /* delta at output layer                    */
double  delw2[nOutputNodes][nHiddenNodes+1];    /* change in weights hidden:output (+ bias) */
double  w2[nOutputNodes][nHiddenNodes+1];       /* weights hidden:output (+ bias)           */
						










int main()
{
	double	eta = 0.15,
			alpha = 0.075;
	int		nReportErrors = 100;	/*error reporting frequency*/	  
	double	ErrorLevel = double(ERRORLEVEL);/*satisfactory error level */

	double	error;					/*latest sum squared error value*/
	
    int	h,i,j;					/*index hidden,input,output layer*/
    int	p;                    /* index pattern number              */
    int	q;                    /* index iteration number            */

	



	/*--------------------- beginning of training ---------------------------*/

	
		
      
		/*--------- initialize the weights at random: -------------------*/

		/* initialize input:hidden weights, uniform in [-1, 1] */
		for (h = 0;  h < nHiddenNodes;  h++)
		{
			for (i = 0;  i <= nInputNodes;  i++)
			{
				w1[h][i] = (double)rand() / RAND_MAX * 2.0 - 1.0;
				delw1[h][i] = 0.0;
			}
		}


		/* initialize hidden:output weights, uniform in [-1, 1] */
		for (j = 0;  j < nOutputNodes;  j++)
		{
			for (h = 0;  h <= nHiddenNodes;  h++)
			{
				w2[j][h] = (double)rand() / RAND_MAX * 2.0 - 1.0;
				delw2[j][h] = 0.0;
			}
		}
		  
       
		

		

		





		/*--------------------- begin iteration loop ------------------------*/
		for (q = 0;  q < nIterations;  q++)
		{
			for (p = 0;  p < nPatterns;  p++)
			{
				/*-------------------- hidden layer --------------------------*/
				/* Sum input to hidden layer over all input-weight combinations */
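				/* net_h = bias + sum_i w1[h][i]*out0[p][i]; the bias is stored as    */
				/* the extra weight w1[h][nInputNodes] with an implicit input of 1.0  */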
				for (h = 0;  h < nHiddenNodes;  h++)
				{
					double sum = w1[h][nInputNodes];  /* begin with bias  */

					for (i = 0;  i < nInputNodes;  i++)
						sum   +=   w1[h][i]  *  out0[p][i];

					/* Compute output (use sigmoid) */
					out1[p][h]   =   1.0  /  (1.0  +  exp(-sum));
				}

				/*-------------------- output layer --------------------------*/
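				/* same forward step for the output layer: sigmoid of the bias        */
				/* w2[j][nHiddenNodes] plus the weighted hidden activations           */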
				for (j = 0;  j < nOutputNodes;  j++)
				{
					double  sum = w2[j][nHiddenNodes];

					for (h = 0;  h < nHiddenNodes;  h++)
						sum  +=  w2[j][h]  *  out1[p][h];

					out2[p][j]  =  1.0  /  (1.0  +  exp(-sum));
				}

				/*-------------------- delta output --------------------------*/
				/* Compute deltas for each output unit for a given pattern */
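				/* (target - out2) is the output error; out2*(1 - out2) is the        */
				/* sigmoid derivative, giving the standard output delta rule          */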
				for (j = 0;  j < nOutputNodes;  j++)
					delta2[p][j] = (target[p][j] - out2[p][j]) * out2[p][j] * (1.0 - out2[p][j]);

				/*-------------------- delta hidden --------------------------*/
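				/* hidden deltas: back-propagate the output deltas through w2 and     */
				/* scale by the sigmoid derivative out1*(1 - out1)                    */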
				for (h = 0;  h < nHiddenNodes;  h++)
				{
					double  sum = 0.0;

					for (j = 0;  j < nOutputNodes;  j++)
						sum  +=  delta2[p][j] * w2[j][h];
					delta1[p][h]  =  sum  *  out1[p][h]  *  (1.0 - out1[p][h]);
				}
			}		/*end of for p*/

			/*-------------- adapt weights hidden:output ---------------------*/
			for (j = 0;  j < nOutputNodes;  j++)
			{
				double  dw;                  /* delta weight */
				double  sum = 0.0;

				/* grand sum of deltas for each output node for one epoch */
				for (p = 0;  p < nPatterns;  p++)
					sum  +=  delta2[p][j];

				/* Calculate new bias weight for each output unit */
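				/* batch (per-epoch) update with momentum:                            */
				/* dw = eta * (summed delta) + alpha * (previous weight change)       */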
				dw   =   eta * sum  +  alpha * delw2[j][nHiddenNodes];   /* bias (threshold) change */
				w2[j][nHiddenNodes]   +=   dw;
				delw2[j][nHiddenNodes] =   dw;     /* delta for bias */

				/* Calculate new weights */
				for (h = 0;  h < nHiddenNodes;  h++)
				{
					double  sum = 0.0;

					for (p = 0;  p < nPatterns;  p++)
						sum  +=  delta2[p][j] * out1[p][h];

					dw = eta * sum  +  alpha * delw2[j][h];
					w2[j][h] += dw;
					delw2[j][h]  =   dw;
				}
			}   /*end of  for adapt weights hidden:output*/

			/*-------------------- adapt weights input:hidden -----------------*/
			for (h = 0;  h < nHiddenNodes;  h++)
			{
				double  dw;                  /* delta weight */
				double  sum = 0.0;

				for (p = 0;  p < nPatterns;  p++)
					sum  +=  delta1[p][h];

				/* Calculate new bias weight for each hidden unit */
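				/* same momentum update as above, now for the input:hidden bias       */
				/* and weights, using the hidden-layer deltas                         */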
				dw   =   eta * sum  +  alpha * delw1[h][nInputNodes];   /* bias (threshold) change */
				w1[h][nInputNodes]   +=   dw;
				delw1[h][nInputNodes] =   dw;

				/* Calculate new weights */
				for (i = 0;  i < nInputNodes;  i++)
				{
					double  sum = 0.0;

					for (p = 0;  p < nPatterns;  p++)
						sum  +=  delta1[p][h] * out0[p][i];

					dw = eta * sum  +  alpha * delw1[h][i];
					w1[h][i]     +=  dw;
					delw1[h][i]  =   dw;
				}
			}    /*end of for  adapt weights input:hidden */


		

			/*-------------------- Sum Squared Error -------------------------*/
			for (p = 0, error = 0.0;  p < nPatterns;  p++)
			{
				for (j = 0;  j < nOutputNodes;  j++)
				{
					double  temp  =  target[p][j] - out2[p][j];

					error += temp * temp;
				}
			}

			/* Average error per node over all patterns */
			error  /=  (nPatterns * nOutputNodes);

			/* Terminate when the error is satisfactory */
			if (error < ErrorLevel)                           /* loop-exit condition */
				break;
			
		}		/*----- end of iteration loop -----*/


        


		/* print results */
		for (p = 0;  p < nPatterns;  p++)
		{
			for (j = 0;  j < nOutputNodes;  j++)
			{
				cout << out2[p][j] << endl;
			}
		}
	return 0;
}
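The listing is plain standard C++ and should build with any mainstream compiler (for example g++ or clang++). When run, it prints the trained network's output for each of the 10 patterns after training stops, i.e. once the mean squared error falls below ERRORLEVEL or the epoch limit nIterations is reached.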



