⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 ids_bpn.cpp

📁 关于BP神经网络的一个课程设计源代码.
💻 CPP
📖 第 1 页 / 共 2 页
字号:
		
	
        //test:--------------------------------------------
		/*for(m=0; m<N; m++)             
		{
			for(i=0; i<IN; i++) //设置输入层的输出
			{ 
				cout<<"data_copy["<<m+1<<"].iuput["<<i+1<<"]="<<data_copy[m].input[i]<<endl;
			}
		}*/
			
		for(m=0; m<N; m++)                     //对N个样本进行增量训练,m为样本个数
		{
			//输入正向传播
		
			for(i=0; i<IN; i++) //设置输入层的输出
			{
				x_out[i] = data_copy[m].input[i];
				/*if (max < x_out[i])   //比较得出输入的最大最小值
			    	max = x_out[i];
				
				if (min > x_out[i]) 
					min = x_out[i];
//			cout<<"x_out["<<i+1<<"]="<<x_out[i]<<endl;*/
				//cout<<"x_out["<<i+1<<"]="<<x_out[i]<<endl;
			}

			/*for(i=0; i<IN; i++)   //归一化后的输入值
			{
				x_out[i] = (x_out[i] - min) / (max-min);
//              cout<<"x_out["<<i+1<<"]="<<x_out[i]<<endl;
			}*/

			for(i=0; i<ON; i++)              //输出层的期望输出
			{
				y[i] = data[m].teach[i];
//				cout<<"y["<<i+1<<"]="<<y[i]<<endl;
			}			
				
			for(i=0; i<HN; i++)                //计算隐含层的输出
			{
				sumtemp = 0.0;
				for(j=0; j<IN; j++)
					sumtemp += w[j][i] * x_out[j];
				hn_out[i] = tanh(sumtemp);     //隐含层作用函数为y=tanh(x)
			}
				
			for(i=0; i<ON; i++)              //计算输出层的输出
			{
				sumtemp = 0.0;
				for(j=0; j<HN; j++) 
					sumtemp += v[j][i] * hn_out[j];
				y_out[i] = g(sumtemp);            //输出层作用函数为sigmod函数
//				y_out[i] = tanh(sumtemp);
			}

				
			//误差反向传播 这些都是在内循环完成的
			for(i=0; i<ON; i++)
			{
				errtemp = y[i] - y_out[i]; 
				y_delta[i] = errtemp * g(y_out[i]) * (1.0 - g(y_out[i]));
//                y_delta[i] = errtemp * (1/(1+exp(-y_out[i]))) * (1.0 - (1/(1+exp(-y_out[i]))));    //计算输出层的delta
				error += (errtemp * errtemp);  //所有样本的全局误差
			}
				
			for(i=0; i<HN; i++)                //计算隐含层的delta
			{
				errtemp = 0.0;
				for(j=0; j<ON; j++)
                    errtemp += y_delta[j] * v[i][j];
				hn_delta[i] = errtemp * (1.0 + hn_out[i]) * (1.0 - hn_out[i]);
			}
				
			for(i=0; i<ON; i++)       //调整输出层的权值和偏置系数
			{
				for(j=0; j<HN; j++)
				{
					deltav[j][i] = alpha * deltav[j][i] + beta * y_delta[i] * hn_out[j];
					v[j][i] += deltav[j][i];
				}
			}
				
				
			for(i=0; i<HN; i++)        //调整隐含层的权值和偏置系数
			{
				for(j=0; j<IN; j++)
				{
					deltaw[j][i] = alpha * deltaw[j][i] + beta * hn_delta[i] * x_out[j];
					w[j][i] += deltaw[j][i];
					//cout<<w[i][j];
				}
			}
		}//end for 样本	
				
		//全局误差判断
		error = error/2;
		//cout<<"全局误差Error="<<error<<endl;
        //cout<<"error="<<error<<endl;
		if(error < errlimit)
		{
			cout<<"loop="<<loop<<endl<<"error="<<error<<endl<<"***********error<errlimit***********\n"<<"Press any key to go on!"<<endl;
			getch();
			break;
		}
			
	}//end while 循环次数
	cout<<endl<<"error="<<error<<endl;
	getch();
    //----------------------------------------------------------------
	// Print the training-result summary.
	cout<<endl<<"training completion............"<<endl;
	cout<<"HN="<<HN<<endl;
	cout<<"Error="<<error<<endl;
	cout<<"Loop="<<loop<<endl;

	// The original disabled the weight dumps with a comment whose trailing
	// backslash line-spliced the next line into the comment, leaving empty
	// loops with stray null statements; the dead loops are commented out
	// cleanly instead (no output either way).
	//for(i=0; i<IN; i++)
	//	for(j=0; j<HN; j++)
	//		cout<<"w["<<i+1<<"]["<<j+1<<"]="<<w[i][j]<<endl;
	//for(i=0; i<HN; i++)
	//	for(j=0; j<ON; j++)
	//		cout<<"v["<<i+1<<"]["<<j+1<<"]="<<v[i][j]<<endl;
			
	


		
	//------------------------------------------------------------------
	// Re-run the trained network on the training samples as a sanity check.
	// NOTE(review): training read data_copy[m].input; confirm data[] holds
	// the same values (normalization was commented out during training).
	cout<<endl<<"Testing the samples(Training Data)… …"<<endl;
	for(m=0; m<N; m++)                     // actual output for each of the N samples
	{
		for(i=0; i<IN; i++)                // input layer output
		{
			x_out[i] = data[m].input[i];
		}
		for(i=0; i<HN; i++)                // hidden layer output, activation y = tanh(x)
		{
			sumtemp = 0.0;
			for(j=0; j<IN; j++)
				sumtemp += w[j][i] * x_out[j];
			// Training used tanh for the hidden layer; the original called
			// g() here, so this check evaluated a different network than
			// the one that was trained.
			hn_out[i] = tanh(sumtemp);
		}

		for(i=0; i<ON; i++)     // output layer output, sigmoid g
		{
			sumtemp = 0.0;
			for(j=0; j<HN; j++)
				sumtemp += v[j][i] * hn_out[j];
			y_out[i] = g(sumtemp);
			//cout<<"data["<<m+1<<"].y["<<i+1<<"]= "<<y_out[i]<<endl;
		}
	}


	//------------------------------------------------------------------
	// Run the trained network on new, unseen activity records and decide
	// whether each looks normal or like an intrusion.
	cout<<endl<<"Assume we detected some new system activities, now we test them to certify \
		whether these activities are in Nomal State… …"<<endl;


	struct                           // new test records, IN inputs each
	{
		double input[IN];
	}newdata[7] = {{206.3806, 14.07,  24.463},
				   {74.7335, 13.653, 59.721},
				   {139.4121, 14.065, 21.306},
	               {107.4832, 16.825, 27.325},
	               {86.0032, 18.99,  25.637},
	               {16.0, 10.0, 5.324},
	               {5000, 100, 2000}}; // NOTE: adjust these values for a different data set


	for(m=0; m<7; m++)                     // classify each of the 7 new records
	{
		for(i=0; i<IN; i++)                // input layer output
		{
			x_out[i] = newdata[m].input[i];
			cout<<"newdata["<<m+1<<"].x["<<i+1<<"]="<<newdata[m].input[i]<<endl;
		}
		for(i=0; i<HN; i++)                // hidden layer output, activation y = tanh(x)
		{
			sumtemp = 0.0;
			for(j=0; j<IN; j++)
				sumtemp += w[j][i] * x_out[j];
			hn_out[i] = tanh(sumtemp);
		}

		for(i=0; i<ON; i++)     // output layer output, sigmoid g
		{
			sumtemp = 0.0;
			for(j=0; j<HN; j++)
				sumtemp += v[j][i] * hn_out[j];
			y_out[i] = g(sumtemp);
			cout<<"newdata["<<m+1<<"].y["<<i+1<<"]= "<<y_out[i]<<endl;
			if(i==0)                        // first output: "normal" indicator flag
			{
				if(y_out[i]>0.9) flage1=1;
				else flage1=0;
			}
			if(i==1)                        // second output: "attack" indicator
			{
				if(y_out[i]<0.1)
				{
					// The original wrote `if(flage1=1)`, an assignment that is
					// always true; `==` tests the flag as intended, so "Normal"
					// is only reported when the first output also agreed.
					if(flage1==1) cout<<"Yes,State Normal.\n"<<endl;
				}
				else if(y_out[i]>0.1 && y_out[i]>=y_out[i-1])
				{
					cout<<"Warning,State Abnormal!Intrusions may exist,Check it!\n"<<endl;
				}
				else cout<<"Sorry,Can not certify whether intrusion exists\n"<<endl;
			}
		}
	}


    End_Show();
	return 0;
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -