⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 somneuro.h

📁 自組織映射網路(SOM)
💻 H
字号:
#include <fstream.h>

#include <time.h>

#include <iomanip.h>

#include <math.h>

#include "array.h"

#include "randgen.h"



// Self-Organizing Map (Kohonen) network with a square nb_out x nb_out
// output grid over nb_dim-dimensional input vectors.
// Uses the f1D/f2D/f3D/i2D array helpers from "array.h" and the fRan32
// random generator from "randgen.h".
class SOMNeuroNet
{
protected:
	int nb_dim;                           // input vector dimension
	int nb_out;                           // side length of the square output grid
	f3D  weight;                          // weight.m[i][j][k]: weight of input i at output node (j,k)
	f2D OutClassDist;                     // per-node Euclidean distance to the current input
	f1D x;                                // current (working) input vector
	i2D LabelClass;                       // sequential label 1..nb_out*nb_out of each output node
	float min_dist;                       // distance of the last winning node
	double eta, eta_rate, eta_min;        // learning rate, its decay factor and floor
	double alpha, alpha_rate, alpha_min;  // neighborhood radius, its decay factor and floor
	int ResultClass;                      // label of the last winning node
	int nb_record;                        // number of loaded training records
	f2D TrainingData;                     // training inputs, one record per row
	f1D Observed;                         // observed class per training record (optional)
  	void MemoryAlloc();                   // allocate all per-network buffers
	void MemoryFree();                    // release all per-network buffers

public:
	i2D CategoryMap;                      // majority observed class per output node (see CalculCategoryMap)
	bool vide;                            // "empty": true when no buffers are allocated
	int XOut,YOut;                        // grid coordinates of the last winning node
	SOMNeuroNet(){nb_dim=0;vide=true;}
	void Initialize(int n1, int n2);      // n1 input dims, n2 x n2 output grid
	void Initialize(ifstream& in);        // restore a network previously written by Save()
	void LoadTrainingData(ifstream& in, int code_size);
	void LoadTrainingData(int cote_size, f2D& Data, f1D& obs);
	void ApplyToTestingData(ifstream& in, ofstream& out);
	void ApplyToTestingData(f2D& Data, f1D& infered);
	void ApplyToTestingData(f2D& Data, f1D& infered, f1D &dist);
	float CalculCategoryMap();            // build CategoryMap; returns resubstitution error
	void SetRandomWeight();               // randomize weights (time-based seed)
	void SetRandomWeight(unsigned long seed);
	void SetupPara(double& e1,double& e2,double& e3,double& r1,double& r2,double& r3);
	void GetPara(double& e1,double& e2,double& e3,double& r1,double& r2,double& r3);
	void SetData(f1D &d){x=d;}            // set the current input vector
	float LearningFromBatchData();        // 3 sweeps; returns % of unstable records
	void BatchLearning();                 // one training sweep over TrainingData
	float Learning(f2D &d, int nb_cycle);
	inline int Learning();                // one SOM step on x; returns winner label
	void Apply(f1D &xx, float &yy, float &dd);  // yy = mapped category, dd = winner distance
	inline int Apply(f1D &xx);            // winner label for xx (does not touch x)
	inline int Apply();                   // winner label for the current x
	int GetClusterIndex(f1D &Data);
	int CategoryMapApply(f1D &Data);      // category of Data via CategoryMap
	void Save(ofstream& out);
	void SaveCategoryMap(ofstream& out);
	void OpenCategoryMap(ifstream& in);
	int Get_nb_dim(){return nb_dim;}
	void ReflushInitialCondition(unsigned long seed);
	void ConvertToBKTrainingData(f2D& DataIn, ofstream& out);
};



// Release every per-network buffer and mark the network as empty.
void SOMNeuroNet::MemoryFree()
{
	CategoryMap.Finish();
	LabelClass.Finish();
	OutClassDist.Finish();
	weight.Finish();
	x.Finish();
	vide = true;
}



// Allocate every per-network buffer for the current nb_dim/nb_out and
// mark the network as in use.
void SOMNeuroNet::MemoryAlloc()
{
	vide = false;
	x.Initialize(nb_dim);
	CategoryMap.Initialize(nb_out, nb_out);
	LabelClass.Initialize(nb_out, nb_out);
	OutClassDist.Initialize(nb_out, nb_out);
	weight.Initialize(nb_dim, nb_out, nb_out);
}



// Set up a fresh network: n1 input dimensions on an n2 x n2 output
// grid, random weights, the default annealing schedule, and sequential
// node labels.
void SOMNeuroNet::Initialize(int n1, int n2)
{
	if(!vide) MemoryFree();
	nb_dim = n1;
	nb_out = n2;
	MemoryAlloc();
	SetRandomWeight();

	// Default annealing schedule.
	eta   = 1.0;  eta_rate   = 0.997;  eta_min   = 0.0001;
	alpha = 2.5;  alpha_rate = 0.997;  alpha_min = 0.0001;

	// Label the output nodes 1..nb_out*nb_out, row by row.
	int label = 0;
	for(int r = 0; r < nb_out; r++)
		for(int c = 0; c < nb_out; c++)
			LabelClass.m[r][c] = ++label;
}



// Restore a network previously written by Save(): dimensions, the full
// weight cube, then the eta and alpha annealing parameters. Node labels
// are rebuilt sequentially rather than read from the file.
void SOMNeuroNet::Initialize(ifstream& in)
{
	if(!vide)MemoryFree();
	int i,j,k;
	// setw on input caps the field width, mirroring Save()'s layout.
	in>>ws>>setw(10)>>nb_dim>>ws>>setw(10)>>nb_out;
	MemoryAlloc();
	for(i=0;i<nb_dim;i++)for(j=0;j<nb_out;j++)for(k=0;k<nb_out;k++)
		in>>ws>>setw(15)>>setprecision(10)>>weight.m[i][j][k];
	in>>ws>>setw(15)>>setprecision(10)>>eta>>ws>>setw(15)>>setprecision(10)>>eta_rate>>ws>>setw(15)>>setprecision(10)>>eta_min;
	in>>ws>>setw(15)>>setprecision(10)>>alpha>>ws>>setw(15)>>setprecision(10)>>alpha_rate>>ws>>setw(15)>>setprecision(10)>>alpha_min;
	// Sequential node labels 1..nb_out*nb_out, row by row.
	int count=0;
	for(i=0;i<nb_out;i++)for(j=0;j<nb_out;j++)
	{
		count++;
		LabelClass.m[i][j]=count;
	}
}



// Fill every weight with a U(0,1) draw, seeding from the wall clock.
void SOMNeuroNet::SetRandomWeight()
{
	fRan32 rangen((long)time(NULL));
	for(int d=0;d<nb_dim;d++)
		for(int r=0;r<nb_out;r++)
			for(int c=0;c<nb_out;c++)
				weight.m[d][r][c] = rangen.Next(0.0,1.0);
}



// Fill every weight with a U(0,1) draw from a caller-supplied seed,
// making the initialization reproducible.
void SOMNeuroNet::SetRandomWeight(unsigned long seed)
{
	fRan32 rangen(seed);
	for(int d=0;d<nb_dim;d++)
		for(int r=0;r<nb_out;r++)
			for(int c=0;c<nb_out;c++)
				weight.m[d][r][c] = rangen.Next(0.0,1.0);
}



// Restart training from scratch: re-seed the weights and restore the
// default annealing schedule (same defaults as Initialize(int,int)).
void SOMNeuroNet::ReflushInitialCondition(unsigned long seed)
{
	SetRandomWeight(seed);
	eta   = 1.0;  eta_rate   = 0.997;  eta_min   = 0.0001;
	alpha = 2.5;  alpha_rate = 0.997;  alpha_min = 0.0001;
}



// Set the annealing parameters: learning rate eta (e1) with decay (e2)
// and floor (e3); neighborhood radius alpha (r1) with decay (r2) and
// floor (r3).
void SOMNeuroNet::SetupPara(double& e1,double& e2,double& e3,double& r1,double& r2,double& r3)
{
	eta      = e1;
	eta_rate = e2;
	eta_min  = e3;

	alpha      = r1;
	alpha_rate = r2;
	alpha_min  = r3;
}



// Report the current annealing parameters through the out-references
// (mirror of SetupPara).
void SOMNeuroNet::GetPara(double& e1,double& e2,double& e3,double& r1,double& r2,double& r3)
{
	e1 = eta;
	e2 = eta_rate;
	e3 = eta_min;

	r1 = alpha;
	r2 = alpha_rate;
	r3 = alpha_min;
}



// Persist the network in the text layout Initialize(ifstream&) reads:
// dimensions, one line of weights per (input, grid-row) pair, then the
// eta and alpha annealing parameters.
void SOMNeuroNet::Save(ofstream& out)
{
	int i,j,k;
	out<<setw(10)<<nb_dim<<setw(10)<<nb_out<<endl;
	for(i=0;i<nb_dim;i++)for(j=0;j<nb_out;j++)
	{
		for(k=0;k<nb_out;k++)out<<setw(15)<<setprecision(10)<<weight.m[i][j][k];
		out<<endl;
	}
	out<<setw(15)<<setprecision(10)<<eta<<setw(15)<<setprecision(10)<<eta_rate<<setw(15)<<setprecision(10)<<eta_min<<endl;
	out<<setw(15)<<setprecision(10)<<alpha<<setw(15)<<setprecision(10)<<alpha_rate<<setw(15)<<setprecision(10)<<alpha_min;
	out<<endl;
}



// Train on d (nb_dim rows x d.nc example columns) for nb_cycle sweeps.
// The winner of each example is recorded on the next-to-last sweep; a
// final pass with L1-normalized inputs is then compared against it, and
// the percentage of examples whose winner changed is returned as a
// rough instability/convergence measure.
// NOTE(review): the training sweeps feed raw inputs while the final
// pass normalizes them (the commented-out lines suggest normalized
// training was once used) — confirm this asymmetry is intended.
float SOMNeuroNet::Learning(f2D &d, int nb_cycle)
{
	int c,i,n;
	int nb_example=d.nc;
	int *err=new int[nb_example];   // winner label per example on the last sweep
	float err_sum=0.0,sum_tmp;
	int result;
	for(n=0;n<nb_example;n++)err[n]=0;
	for(c=0;c<nb_cycle-1;c++)
	{
		for(n=0;n<nb_example;n++)
		{
 //			sum_tmp=0.001;
 //			for(i=0;i<nb_dim;i++)sum_tmp=sum_tmp+fabs(d.m[i][n]);
 //			for(i=0;i<nb_dim;i++)x.m[i]=d.m[i][n]/sum_tmp;
			for(i=0;i<nb_dim;i++)x.m[i]=d.m[i][n];
			if(c==(nb_cycle-2))err[n]=Learning();
			else Learning();
		}
	}
	// Final check pass with normalized inputs (Learning() still adapts,
	// but eta/alpha are near their floors by now).
	for(n=0;n<nb_example;n++)
	{
		sum_tmp=0.001;   // small offset avoids division by zero on all-zero inputs
		for(i=0;i<nb_dim;i++)sum_tmp=sum_tmp+fabs(d.m[i][n]);
		for(i=0;i<nb_dim;i++)x.m[i]=d.m[i][n]/sum_tmp;
		result=Learning();
		if(result!=err[n])err_sum++;
	}
	delete [] err;   // FIX: was `delete err` on new[] storage (undefined behavior)
	return err_sum*100./float(nb_example);
}



// One SOM update step on the current input x:
//  1) compute the Euclidean distance from x to every node's weights,
//  2) pick the closest node (the winner) into XOut/YOut,
//  3) pull every node's weights toward x, scaled by eta and by
//     exp(-r/alpha) where r is the node's grid distance to the winner,
//  4) decay eta and alpha toward their floors.
// Returns the winner's sequential label.
int SOMNeuroNet::Learning()
{
	int i,j,k;
	float tmp1,sum,min,r_from_win,facteur;
//
//  Compute the distance map
//
	for(k=0;k<nb_out;k++)for(j=0;j<nb_out;j++)
	{
		sum=0.0;
		for(i=0;i<nb_dim;i++)
		{
			tmp1=x.m[i]-weight.m[i][j][k];
			sum+=tmp1*tmp1;
		}
		OutClassDist.m[j][k]=sqrt(sum);
	}
//
// Determine the winner (ties go to the first minimum in scan order)
//
	min=1.0e+30;
	for(k=0;k<nb_out;k++)for(j=0;j<nb_out;j++)
	if(OutClassDist.m[j][k]<min)
	{
		XOut=j;
		YOut=k;
		min=OutClassDist.m[XOut][YOut];
	}
	ResultClass = LabelClass.m[XOut][YOut];

//
// Move weights toward x, strongest at the winner and falling off
// exponentially with grid distance
//
	for(j=0;j<nb_out;j++)for(k=0;k<nb_out;k++)
	{
		r_from_win=sqrt(float((j-XOut)*(j-XOut))+float((k-YOut)*(k-YOut)));
		facteur=exp(-(r_from_win/alpha));
		for(i=0;i<nb_dim;i++)weight.m[i][j][k]=weight.m[i][j][k]+eta*(x.m[i]-weight.m[i][j][k])*facteur;
	}
//
// Anneal: shrink learning rate and neighborhood, clamped at their floors
//
	alpha=alpha*alpha_rate; if(alpha<alpha_min)alpha=alpha_min;
	eta=eta*eta_rate;	if(eta<eta_min)eta=eta_min;
	return ResultClass;
}



// Classify xx through the category map: copy it into the working input,
// run the winner search, then report the mapped category in yy and the
// winner's distance in dd.
void SOMNeuroNet::Apply(f1D &xx, float &yy, float &dd)
{
	for(int d=0;d<nb_dim;d++)
		x.m[d]=xx.m[d];
	Apply();
	dd=min_dist;
	yy=CategoryMap.m[XOut][YOut];
}



// Find the output node whose weight vector is closest (Euclidean) to
// xx, without touching the working input x. Records the winner in
// XOut/YOut and its distance in min_dist; returns its sequential label.
int SOMNeuroNet::Apply(f1D &xx)
{
	int d,jj,kk;

	// Distance from xx to every node's weight vector.
	for(kk=0;kk<nb_out;kk++)for(jj=0;jj<nb_out;jj++)
	{
		float acc=0.0;
		for(d=0;d<nb_dim;d++)
		{
			float diff=xx.m[d]-weight.m[d][jj][kk];
			acc=acc+diff*diff;
		}
		OutClassDist.m[jj][kk]=sqrt(acc);
	}

	// Closest node wins; ties go to the first one in scan order.
	float best=1.0e+30;
	for(kk=0;kk<nb_out;kk++)for(jj=0;jj<nb_out;jj++)
		if(OutClassDist.m[jj][kk]<best)
		{
			best=OutClassDist.m[jj][kk];
			XOut=jj;
			YOut=kk;
		}
	min_dist=best;
	ResultClass = LabelClass.m[XOut][YOut];
	return ResultClass;
}



// Winner search on the current working input x: fills OutClassDist,
// records the winner in XOut/YOut and its distance in min_dist, and
// returns the winner's sequential label.
int SOMNeuroNet::Apply()
{
	int d,jj,kk;

	// Distance from x to every node's weight vector.
	for(kk=0;kk<nb_out;kk++)for(jj=0;jj<nb_out;jj++)
	{
		float acc=0.0;
		for(d=0;d<nb_dim;d++)
		{
			float diff=x.m[d]-weight.m[d][jj][kk];
			acc=acc+diff*diff;
		}
		OutClassDist.m[jj][kk]=sqrt(acc);
	}

	// Closest node wins; ties go to the first one in scan order.
	float best=1.0e+30;
	for(kk=0;kk<nb_out;kk++)for(jj=0;jj<nb_out;jj++)
		if(OutClassDist.m[jj][kk]<best)
		{
			best=OutClassDist.m[jj][kk];
			XOut=jj;
			YOut=kk;
		}
	min_dist=best;
	ResultClass = LabelClass.m[XOut][YOut];
	return ResultClass;
}



// Read training data from a text stream whose header is three ints:
// input dimension, an observed-class flag (1 = each record ends with an
// observed value; >1 = unsupported, function returns silently), and the
// record count. Re-initializes the network for a cote_size x cote_size
// output grid.
// NOTE(review): the class declaration names this parameter code_size —
// presumably both mean the grid side length; confirm.
void SOMNeuroNet::LoadTrainingData(ifstream& in, int cote_size)
{
	int i,j,n1,n2,n3;
	in>>n1>>n2>>n3;
	if(n2>1)return;   // only 0 or 1 observed columns are supported
	nb_record=n3;
	if(n2==1)Observed.Initialize(nb_record);
	if(!vide)MemoryFree();
	Initialize(n1,cote_size);
	TrainingData.Initialize(nb_record,nb_dim);
	for(i=0;i<nb_record;i++)
	{
		for(j=0;j<nb_dim;j++)in>>TrainingData.m[i][j];
		if(n2==1)in>>Observed.m[i];
	}
}



// Adopt in-memory training data: re-initialize the network for Data's
// dimensions and a cote_size x cote_size grid, then alias the caller's
// storage instead of copying it. Returns silently if Data and Obs
// disagree on the record count.
// NOTE(review): TrainingData.m and Observed.m share the caller's
// buffers after this call — if the f2D/f1D destructors free m, both
// objects will free the same memory; confirm ownership in "array.h".
void SOMNeuroNet::LoadTrainingData(int cote_size, f2D& Data, f1D& Obs)
{
	if(Data.nr!=Obs.nb)return;   // one observed value per record required
	nb_out = cote_size;
	nb_dim = Data.nc;
	nb_record = Data.nr;
	if(!vide)MemoryFree();
	Initialize(nb_dim,cote_size);
	TrainingData.nr=Data.nr;
	TrainingData.nc=Data.nc;
  	TrainingData.m=Data.m;   // shallow alias, not a copy
	Observed.nb=Obs.nb;
	Observed.m = Obs.m;      // shallow alias, not a copy
}



// Run three full training sweeps over TrainingData and return the
// percentage of records whose winning class differed between any two
// sweeps — a simple stability measure of the map.
float SOMNeuroNet::LearningFromBatchData()
{
	int c,i,n;
	int *c1,*c2,*c3;   // winner label per record, one array per sweep
	c1=new int[nb_record];c2=new int[nb_record];c3=new int[nb_record];
	for(c=0;c<3;c++)
	{
		for(n=0;n<nb_record;n++)
		{
			for(i=0;i<nb_dim;i++)x.m[i]=TrainingData.m[n][i];
			if(c==0)c1[n]=Learning();
			else if(c==1)c2[n]=Learning();
			else c3[n]=Learning();
		}
	}
	// A record is "unstable" if its winner changed between any pair of sweeps.
	int mis_category=0;
	for(n=0;n<nb_record;n++)if((c1[n]!=c2[n])||(c2[n]!=c3[n])||(c1[n]!=c3[n]))mis_category++;
	vide=false;
	delete [] c1;delete [] c2;delete [] c3;   // FIX: was `delete` on new[] storage (undefined behavior)
	float err = float(mis_category)*100./float(nb_record);
	return err;
}



// One training sweep: feed every stored record through Learning().
void SOMNeuroNet::BatchLearning()
{
	for(int rec=0;rec<nb_record;rec++)
	{
		for(int d=0;d<nb_dim;d++)
			x.m[d]=TrainingData.m[rec][d];
		Learning();
	}
	vide=false;
}



// Build CategoryMap: assign to each output node the observed class that
// hits it with the highest class-relative frequency, then return the
// fraction of training records the finished map misclassifies.
// Returns -1 if no observed classes were loaded.
float SOMNeuroNet::CalculCategoryMap()
{
	if(Observed.m==0)return -1;
	int i,j,k,n;
	int nb_cla=0;

	// Number of classes = max observed label + 1 (labels assumed 0-based).
	for(i=0;i<Observed.nb;i++)if(nb_cla<Observed.m[i])nb_cla=Observed.m[i];
	nb_cla++;
	int *cp=new int[nb_cla];   // record count per class
	for(i=0;i<nb_cla;i++)cp[i]=0;
	for(n=0;n<nb_record;n++)cp[int(Observed.m[n])]++;

	// Ct[j][k][cla] accumulates class-relative hit frequency of node
	// (j,k): each record adds 1/cp[cla], so classes of different sizes
	// compete fairly.
	f3D Ct;
	Ct.Initialize(nb_out,nb_out,nb_cla);
	for(i=0;i<nb_out;i++)for(j=0;j<nb_out;j++)for(k=0;k<nb_cla;k++)Ct.m[i][j][k]=0;
	int cla;
	for(n=0;n<nb_record;n++)
	{
		for(i=0;i<nb_dim;i++)x.m[i]=TrainingData.m[n][i];
		Apply();
		cla = Observed.m[n];
		Ct.m[XOut][YOut][cla] += (1.0/cp[cla]);
	}

	// Majority vote per node. FIX: max was declared int, so the float
	// frequencies (all <= 1) truncated to 0 on assignment and the LAST
	// class with any hits won instead of the most frequent one.
	float max;
	for(i=0;i<nb_out;i++)for(j=0;j<nb_out;j++)
	{
		max=0;
		CategoryMap.m[i][j]=-1;   // -1 marks nodes no record ever won
		for(k=0;k<nb_cla;k++)if(Ct.m[i][j][k]>max){max=Ct.m[i][j][k];CategoryMap.m[i][j]=k; }
	}
	delete [] cp;   // FIX: cp was leaked

	// Resubstitution error of the finished map.
	int mis_cla=0;
	for(n=0;n<nb_record;n++)
	{
		for(i=0;i<nb_dim;i++)x.m[i]=TrainingData.m[n][i];
		Apply();
		if(CategoryMap.m[XOut][YOut]!=Observed.m[n])mis_cla++;
	}
	float err=float(mis_cla)/float(nb_record);
	return err;
}



// Write the nb_out x nb_out category map as tab-separated rows.
void SOMNeuroNet::SaveCategoryMap(ofstream& out)
{
	for(int r=0;r<nb_out;r++)
	{
		for(int c=0;c<nb_out;c++)
			out<<CategoryMap.m[r][c]<<"\t";
		out<<endl;
	}
}



// Read the nb_out x nb_out category map written by SaveCategoryMap().
// Assumes the network was already Initialized to the matching size.
void SOMNeuroNet::OpenCategoryMap(ifstream& in)
{
	for(int r=0;r<nb_out;r++)
		for(int c=0;c<nb_out;c++)
			in>>CategoryMap.m[r][c];
}



// Classify every record of a test stream and write "observed\tinfered"
// pairs. The three-int header must match the network (input dimension,
// grid size, record count); mismatches abort silently.
// NOTE(review): the second header value is compared to nb_out here but
// is treated as an observed-class flag in LoadTrainingData — confirm
// which file format this overload actually expects.
void SOMNeuroNet::ApplyToTestingData(ifstream& in, ofstream& out)
{
	int i,j,n1,n2,n3;
	in>>n1>>n2>>n3;
	if((n1!=nb_dim)||(n2!=nb_out))return;
	out<<"observed"<<"\t"<<"infered\n";
	int desire_y,cla;
	for(i=0;i<n3;i++)
	{
		for(j=0;j<nb_dim;j++)in>>x.m[j];
		in>>desire_y;
		Apply();
		cla=CategoryMap.m[XOut][YOut];
		out<<desire_y<<"\t"<<cla<<endl;
	}
}



// Classify every row of Data through the category map, writing one
// category per record into infered (allocated here).
void SOMNeuroNet::ApplyToTestingData(f2D& Data, f1D& infered)
{
	infered.Initialize(Data.nr);
	for(int rec=0;rec<Data.nr;rec++)
	{
		for(int d=0;d<nb_dim;d++)
			x.m[d]=Data.m[rec][d];
		Apply();
		infered.m[rec]=CategoryMap.m[XOut][YOut];
	}
}



// Classify every row of Data through the category map, writing one
// category per record into infered and the winner's distance into dist
// (both allocated here, matching the two-argument overload).
void SOMNeuroNet::ApplyToTestingData(f2D& Data, f1D& infered, f1D &dist)
{
	infered.Initialize(Data.nr);
	dist.Initialize(Data.nr);   // FIX: dist was written below without ever being allocated
	for(int i=0;i<Data.nr;i++)
	{
		for(int j=0;j<nb_dim;j++)x.m[j]=Data.m[i][j];
		Apply();
		dist.m[i]=min_dist;
		infered.m[i]=CategoryMap.m[XOut][YOut];
	}
}



// Export DataIn as training data for a follow-on network: a header of
// "input-dims 2 record-count", then each record's raw inputs followed
// by the normalized grid coordinates (tx,ty) of its winning node as the
// two target outputs.
void SOMNeuroNet::ConvertToBKTrainingData(f2D& DataIn, ofstream& out)
{
	out<<DataIn.nc<<"\t"<<int(2)<<"\t"<<DataIn.nr<<endl;   // FIX: newline was missing, fusing the count with the first value
	float tx,ty;
	for(int i=0;i<DataIn.nr;i++)
	{
		for(int j=0;j<DataIn.nc;j++)out<<DataIn.m[i][j]<<"\t";
		for(int j=0;j<nb_dim;j++)x.m[j]=DataIn.m[i][j];
		Apply();
		tx=float(XOut)/float(nb_out);
		ty=float(YOut)/float(nb_out);   // FIX: was XOut — copy-paste made ty duplicate tx
		out<<tx<<"\t"<<ty<<endl;
	}
}



// Run the winner search on Data and return the category-map entry of
// the winning node.
int SOMNeuroNet::CategoryMapApply(f1D &Data)
{
	for(int d=0;d<nb_dim;d++)
		x.m[d]=Data.m[d];
	Apply();
	return CategoryMap.m[XOut][YOut];
}



// Copy Data into the working input and return the winning node's
// sequential label (its cluster index).
int SOMNeuroNet::GetClusterIndex(f1D &Data)
{
	for(int d=0;d<nb_dim;d++)
		x.m[d]=Data.m[d];
	return Apply();
}





⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -