// NeuralNetwork.cpp: implementation of the NeuralNetwork class.
//
//////////////////////////////////////////////////////////////////////

#include "stdafx.h"
#include "NN.h"
#include "NeuralNetwork.h"
#include "math.h"
#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[]=__FILE__;
#define new DEBUG_NEW
#endif

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

NeuralNetwork::NeuralNetwork()
{

}

NeuralNetwork::~NeuralNetwork()
{

}

double NeuralNetwork::sigmoid(double input)
{
	double output;
	output = 1.0/(1.0 + exp(-input));
	return output;
}
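// Sanity check for sigmoid() above: sigmoid(0) = 0.5, and every output lies strictly between 0 and 1.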
// Generate a random number between a and b
double NeuralNetwork::mrand(double a,double b)
{
   	double out;
	out=rand();
	out=a+(b-a)*(out/RAND_MAX);
	return out;
}

// Compute node outputs. layer = 0, 1, 2: 0 is the input layer, 1 the hidden layer, 2 the output layer.
// node is the node index, node = 0..number_node.
// w[layer][node1][node2] is a weight: layer is the neuron layer, node1 the node in that layer, node2 the node in the previous layer.
// node1 and node2 are both counted from 0.
// weight_1 is laid out as weight_1[hidden][input];
//DEL void NeuralNetwork::input_of_hidden_func()
//DEL {
//DEL 
//DEL }

//DEL void NeuralNetwork::output_of_hidden_func()
//DEL {
//DEL 
//DEL 	for (int i=0;i<sample_num;i++)
//DEL 	{
//DEL 		for (int j=0;j<number_hidden;j++)
//DEL 		{
//DEL 	        
//DEL 		}
//DEL 	}
//DEL }
// weight_2 is laid out as weight_2[output][hidden];
//DEL void NeuralNetwork::input_of_output_func()
//DEL {
//DEL     double sum=0;
//DEL 
//DEL 	for (int i=0;i<sample_num;i++)
//DEL 	{
//DEL 		for (int j=0;j<number_output;j++)
//DEL 		{
//DEL 			for (int h=0;h<number_hidden;h++)
//DEL 			{
//DEL 				sum=sum+weight_2[j][h]*output_of_hidden[i][h];
//DEL 			}
//DEL 			input_of_output[i][j]=sum;
//DEL 			sum=0;
//DEL 		}
//DEL 	}
//DEL }

//DEL void NeuralNetwork::output_of_output_func()
//DEL {
//DEL     double aa;
//DEL 	for (int i=0;i<sample_num;i++)
//DEL 	{
//DEL 		for (int j=0;j<number_output;j++)
//DEL 		{
//DEL 			
//DEL 			aa=output_of_output[i][j];
//DEL 		}
//DEL 	}
//DEL }

//DEL double NeuralNetwork::error_comput()
//DEL {
//DEL  
//DEL 
//DEL 	return err_sum_temp;
//DEL 
//DEL }
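// Illustrative sketch, not part of the original source: given the weight layout
// documented above, this is the per-node computation that feed() below repeats
// for every sample. The names weights_row, inputs and n_inputs are hypothetical
// and used only for this example.
static double hidden_node_output(const double* weights_row, const double* inputs, int n_inputs)
{
	double net = 0;
	for (int h = 0; h < n_inputs; h++)
		net += weights_row[h] * inputs[h];	// e.g. weight_1[j][h] * x[h]
	return 1.0/(1.0 + exp(-net));			// same sigmoid as sigmoid() above
}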

void NeuralNetwork::feed()
{
 
	double output_of_hidden[max_sample][max_node];// outputs of the hidden-layer nodes
	double sum=0;	
	for (int i=0;i<sample_num_temp;i++)
	{
		for (int j=0;j<number_hidden;j++)
		{
			for (int h=0;h<number_input;h++)
			{
				sum=sum+weight_1[j][h]*data_norm_temp[i][h];
			}
			output_of_hidden[i][j]=sum;
			output_of_hidden[i][j]=sigmoid(output_of_hidden[i][j]);
			sum=0;
		}
	}
	for (int i=0;i<sample_num_temp;i++)
	{
		for (int j=0;j<number_output;j++)
		{
			for (int h=0;h<number_hidden;h++)
			{
				sum=sum+weight_2[j][h]*output_of_hidden[i][h];
			}
			output_of_output[i][j]=sum;
			output_of_output[i][j]=sigmoid(output_of_output[i][j]);
			sum=0;
		}
	}
	// Per-sample error: err[i] is the sum over output nodes of
	// |output_of_output[i][j] - data_out_binary[i][j]|; err_sum totals it over all samples.
	double err_temp=0;
	double err_sum_temp=0;
	for (int i=0;i<sample_num_temp;i++)
	{
		for (int j=0;j<number_output;j++)
		{
			err_temp=err_temp+fabs(output_of_output[i][j]-data_out_binary[i][j]);
		}
		err[i]=err_temp;
		err_temp=0;
		err_sum_temp=err_sum_temp+err[i];
	}
	err_sum=err_sum_temp;

}

BOOL NeuralNetwork::LoadWeight(CString file_w1,CString file_w2)
{
    CFile f_w1,f_w2;// file objects
	DWORD len;// file length in bytes
	char buf[10000];
	CString cweight;

// Read w1
	f_w1.Open(file_w1,CFile::modeRead);
	len=f_w1.GetLength();
	f_w1.Read(buf,len);
	f_w1.Close();
	cweight.Empty();
	for (int i=0;i<(int)len;i++)
		cweight=cweight+buf[i];
//	
    double data;// temporary storage for one parsed value
	int count=0;
	int numb_input=0;
	int numb_hidden=0;
	CString ss;


	for (int i=0;i<(int)len;i++)
	{
		if (cweight.Mid(i,1)!="\n")
			count++;
		if (cweight.Mid(i,1)=="\n")
		{
		    data=atof(cweight.Mid(i-count-1,count));
		    count=0;
			numb_input++;

		    if ((fmod((numb_input-1),number_input)==0)&&((numb_input-1)!=0))
			{
				numb_hidden++;
				numb_input=1;
			}
			weight_1[numb_hidden][numb_input-1]=data;
			ss.Format("weight_1[%d][%d]=%f",numb_hidden,numb_input-1,weight_1[numb_hidden][numb_input-1]);
		    AfxMessageBox(ss);
		}
	}
// Finished reading w1
	if ((numb_hidden+1)!=number_hidden)
	{
		AfxMessageBox("隐含层至输入层矩阵不完整!");
		return FALSE;
	}

// Read w2
	f_w2.Open(file_w2,CFile::modeRead);
	len=f_w2.GetLength();
	f_w2.Read(buf,len);
	f_w2.Close();
	cweight.Empty();
	for (int i=0;i<(int)len;i++)
		cweight=cweight+buf[i];

    count=0;
	int numb_output=0;
	numb_hidden=0;


	for (int i=0;i<(int)len;i++)
	{
		if (cweight.Mid(i,1)!="\n")
			count++;
		if (cweight.Mid(i,1)=="\n")
		{
		    data=atof(cweight.Mid(i-count-1,count));
		    count=0;
		    numb_hidden++;
		
		    if ((fmod((numb_hidden-1),number_hidden)==0)&&((numb_hidden-1)!=0))
			{
			    numb_output++;
			    numb_hidden=1;
			}
		    weight_2[numb_output][numb_hidden-1]=data;

		}
	}
// Finished reading w2

	if ((numb_output+1)!=number_output)
	{
		AfxMessageBox("输出层至隐含层矩阵不完整!");
		return FALSE;
	}
	

	return TRUE;
}
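// Weight-file format expected by LoadWeight above and written by SaveWeightAsFile
// below: one value per line, each line terminated by '\n', with weight_1 stored
// row by row as weight_1[hidden][input] (and weight_2 as weight_2[output][hidden]).
// For example, the w1 file of a 2-hidden / 2-input network might look like this
// (the values are placeholders):
//   0.12345
//   -0.54321
//   0.00001
//   0.99999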

BOOL NeuralNetwork::SaveWeightAsFile(CString file_w1,CString file_w2)
{
	FILE *fp_w1,*fp_w2;// files for saving the weights
	fp_w1=fopen(file_w1,"w");
	if (fp_w1==NULL)
	{
		AfxMessageBox("Cannot open the file");
		return FALSE;
	}
	for (int i=0;i<number_hidden;i++)
		for (int j=0;j<number_input;j++)
			fprintf(fp_w1,"%3.5f\n",weight_1[i][j]);
	fclose(fp_w1);

	fp_w2=fopen(file_w2,"w");
	if (fp_w2==NULL)
	{
		AfxMessageBox("Cannot open the file");
		return FALSE;
	}
	for (int i=0;i<number_output;i++)
		for (int j=0;j<number_hidden;j++)
			fprintf(fp_w2,"%3.5f\n",weight_2[i][j]);
	fclose(fp_w2);
	return TRUE;
}

void NeuralNetwork::Init_weight(double a, double b)
{
	srand((unsigned int)time(NULL));
	for (int i=0;i<max_node;i++)
		for (int j=0;j<max_node;j++)
		{
			weight_1[i][j]=mrand(a,b);
			weight_2[i][j]=mrand(a,b);

		}

}
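// Hypothetical usage (the variable name and file names are assumptions, not from
// the original project):
//   NeuralNetwork net;
//   net.Init_weight(-0.5, 0.5);           // random initial weights in [-0.5, 0.5]
//   net.LoadWeight("w1.txt", "w2.txt");   // or load previously saved weights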

void NeuralNetwork::Empty()
{
    int i=0,j=0;
	for (i=0;i<100;i++)
	{
		for (j=0;j<100;j++)
		{
			data_in[i][j]=0;
			data_file[i][j]=0;
			data_norm[i][j]=0;
		}
		data_out[i]=0;
	}
}



int NeuralNetwork::Getdata(CString filename)
{
    CFile fp;
	CString sdata;
    DWORD len;// file length in bytes
	char  buf[10000];// buffer for the characters read

	fp.Open(filename,CFile::modeRead);
	len=fp.GetLength();
    fp.Read(buf,len);
    fp.Close();

	sdata.Empty();
	for (int i=0;i<int(len);i++)
	{
		sdata=sdata+buf[i];
	}

	int di=0;
	int character_num1=0,character_num2=0;
	sample_num=0;
	int count=0;

    int data_len=sdata.GetLength();
    while (di<data_len)
	{
		if (sdata.Mid(di,1)!=' ')
		    count++;
		if (sdata.Mid(di,1)==' ')
		{
			data_file[sample_num][character_num1]=atof(sdata.Mid(di-count,count));
			count=0;
			character_num1++;
		}
        if (sdata.Mid(di,1)=='\n')
		{
			data_file[sample_num][character_num1]=atof(sdata.Mid(di-count+1,count-2));
			sample_num++;
			if ((sample_num>1) && (character_num2!=character_num1))
			{
				AfxMessageBox("数据错误,数据不完整,请检查!");
				return 0;
			}
            character_num2=character_num1;
			character_num1=0;
			count=0;
		}
		di++;
	}
    character_num=character_num2;
	number_input=character_num;
// Split the samples into input and output parts
	CString ss;
	for (int i=0;i<sample_num;i++)
	{
		for (int j=1;j<character_num;j++)
		{
			data_in[i][j-1]=data_file[i][j];

		}
		data_out[i]=data_file[i][0];
	}
	output_num=2;
	number_output=output_num;

	return 1;
}
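// Data-file format implied by the parser above: values are separated by single
// spaces and each sample occupies one newline-terminated line. The first value
// on a line is the desired output (class label) and the remaining values are
// the input features. A line for a three-feature sample might look like this
// (placeholder values):
//   2 0.15 0.80 0.33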
// Normalize the data
void NeuralNetwork::norm(CString filename)
{
	normnofile();
	SaveDatanormAsFile(filename);

}
// Data preprocessing (body currently commented out)
void NeuralNetwork::datapretreat()
{
 /*   int i,j,num;// num counts identical values
	int m=0;// number of identical sample pairs
	for (i=0;i<sample_num-2;i++)
	{
		for (j=i+1;j<sample_num;j++)
		{
			for (int h=0;h<character_num-1;h++)
			{
			    if (data_norm[i][h]==data_norm[j][h])
				{
					num++;
				}
			}
			if (num==character_num-1)
			{
				same_sample_no[m][1]=i;
				same_sample_no[m][2]=j;
				m++;
			}
			num=0;
			
		}
	}
    
int num2=0,num3=0;
	for (j=0;j<character_num-1;j++)
	{
        for (i=0;i<sample_num;i++)
		{
			if (data_norm[i][j]==data_norm[1][j])
			{
				num2++;
			}
		}
		if (num2==(character_num-1))
		{
		    unchange_char[num3]=j;
			num3++;
		}
		if (num2!=(character_num-1))
		{
            
			for (int h=0;h<sample_num;h++)
			{
				treated_data_norm[h][treated_char_num]=data_norm[h][j];
				treated_char_num++;
			}
		}
		num2=0;
	}*/
}

NeuralNetwork::MinDis()
{
	double dis,dissum[50][50];
	for (int i=0;i<50;i++)
	{
		for (int j=0;j<50;j++)
		{
		    dissum[i][j]=0;
		}
	}
	 
	for (int i=0;i<test_num;i++)
	 {
		 for (int j=0;j<sample_num;j++)
		 {
			 for (int h=0;h<character_num;h++)
			 {
				 dis=fabs(data_norm_test[i][h]-data_norm[j][h]);
				 dissum[i][j]=dissum[i][j]+dis;
				 dis=0;
			 }
		 }
	}

	for (int i=0;i<test_num;i++)
	{
		min[i][0]=dissum[i][0];
		min[i][1]=0;
	}

	for (int i=0;i<test_num;i++)
	{
		for (int j=1;j<sample_num;j++)
		{
			if (dissum[i][j]<=min[i][0]) 
			{
				min[i][0]=dissum[i][j];
				min[i][1]=j+1;
			}
		}
	}

    int count=0;
	for (int i=0;i<test_num;i++)
	{
		if (min[i][1]==data_out_test[i])
		{
