// patternhw4doc.cpp
// Neural Network program for pattern classification.
// (Captured from a web code viewer: 985 lines total, page 1 of 3.)
	/********************************************************************/
	
	/********************************************************************/
	/* Normalize the input data value									*/
	/********************************************************************/
	for(i=0;i<75;i++) {
		Data[i].sepalLength = ( Data[i].sepalLength - sepalL_min ) / sepalL_dif;
		Data[i].sepalWidth  = ( Data[i].sepalWidth  - sepalW_min ) / sepalW_dif;
		Data[i].petalLength = ( Data[i].petalLength - petalL_min ) / petalL_dif;
		Data[i].petalWidth  = ( Data[i].petalWidth  - petalW_min ) / petalW_dif;
	}
	/********************************************************************/
}

/************************************************************************/
/* EnterData(int mode)													*/
/* Name: EnterData														*/
/* Parameter: int mode - Training or Test mode							*/
/* Return: No															*/
/* Explain: Enter data to Certain type of structure						*/
/************************************************************************/
void CPatternHW4Doc::EnterData(int mode)
{
	register int row;

	/* Fill the 75-sample destination table: rows 0-24 come from m_Data1,	*/
	/* rows 25-49 from m_Data2, and rows 50-74 from m_Data3. Columns 0-3	*/
	/* hold the four iris features; columns 4-6 the class-label vector.		*/
	if(mode == TRAINMODE){									// Training Mode -> TrData
		for(row=0;row<25;row++){
			const int mid = row + 25;						// Index into the second block
			const int top = row + 50;						// Index into the third block
			TrData[row].sepalLength	= m_Data1[row][0];
			TrData[row].sepalWidth	= m_Data1[row][1];
			TrData[row].petalLength	= m_Data1[row][2];
			TrData[row].petalWidth	= m_Data1[row][3];
			TrData[row].CLdata[0]	= m_Data1[row][4];
			TrData[row].CLdata[1]	= m_Data1[row][5];
			TrData[row].CLdata[2]	= m_Data1[row][6];
			TrData[mid].sepalLength	= m_Data2[row][0];
			TrData[mid].sepalWidth	= m_Data2[row][1];
			TrData[mid].petalLength	= m_Data2[row][2];
			TrData[mid].petalWidth	= m_Data2[row][3];
			TrData[mid].CLdata[0]	= m_Data2[row][4];
			TrData[mid].CLdata[1]	= m_Data2[row][5];
			TrData[mid].CLdata[2]	= m_Data2[row][6];
			TrData[top].sepalLength	= m_Data3[row][0];
			TrData[top].sepalWidth	= m_Data3[row][1];
			TrData[top].petalLength	= m_Data3[row][2];
			TrData[top].petalWidth	= m_Data3[row][3];
			TrData[top].CLdata[0]	= m_Data3[row][4];
			TrData[top].CLdata[1]	= m_Data3[row][5];
			TrData[top].CLdata[2]	= m_Data3[row][6];
		}
	}else if(mode == TESTMODE){								// Test Mode -> TstData
		for(row=0;row<25;row++){
			const int mid = row + 25;						// Index into the second block
			const int top = row + 50;						// Index into the third block
			TstData[row].sepalLength	= m_Data1[row][0];
			TstData[row].sepalWidth		= m_Data1[row][1];
			TstData[row].petalLength	= m_Data1[row][2];
			TstData[row].petalWidth		= m_Data1[row][3];
			TstData[row].CLdata[0]		= m_Data1[row][4];
			TstData[row].CLdata[1]		= m_Data1[row][5];
			TstData[row].CLdata[2]		= m_Data1[row][6];
			TstData[mid].sepalLength	= m_Data2[row][0];
			TstData[mid].sepalWidth		= m_Data2[row][1];
			TstData[mid].petalLength	= m_Data2[row][2];
			TstData[mid].petalWidth		= m_Data2[row][3];
			TstData[mid].CLdata[0]		= m_Data2[row][4];
			TstData[mid].CLdata[1]		= m_Data2[row][5];
			TstData[mid].CLdata[2]		= m_Data2[row][6];
			TstData[top].sepalLength	= m_Data3[row][0];
			TstData[top].sepalWidth		= m_Data3[row][1];
			TstData[top].petalLength	= m_Data3[row][2];
			TstData[top].petalWidth		= m_Data3[row][3];
			TstData[top].CLdata[0]		= m_Data3[row][4];
			TstData[top].CLdata[1]		= m_Data3[row][5];
			TstData[top].CLdata[2]		= m_Data3[row][6];
		}
	}
}

/****************************************************************************************************************/
/* InitMLPLayer(Layer* current_layer, int mode, int nd, double Momentum, double Leaning_rate, Layer *prev_Layer)*/
/* Name: InitMLPLayer																							*/
/* Parameter: Layer* current_layer - current layer																*/
/*			  int mode - Input, Hidden, or Output mode															*/
/*			  int nd - The number of nodes for current layer													*/
/*			  double Momentum - Momentum of current layer														*/
/*			  double Learning_rate - Learning rate of classifier												*/
/*			  Layer *prev_Layer - The pointer of previous layer													*/
/* Return: No																									*/
/* Explain: Initialize the layer for proper mode, nodes, momentum, etc.											*/	
/****************************************************************************************************************/
void CPatternHW4Doc::InitMLPLayer(Layer* current_layer, int mode, int nd, double Momentum, double Leaning_rate, Layer *prev_Layer)
{
	/************************************************************************/
	/* Initialize the State, Variance, and weight vectors.					*/
	/* NOTE: Y[i] does not depend on j, so it is hoisted out of the inner	*/
	/* loop (the original re-zeroed it MAXNODES times per node).			*/
	/************************************************************************/
	for(int i=0;i<MAXNODES;i++){
		current_layer->Y[i] = 0;							// Node output state
		for(int j=0;j<MAXNODES;j++)
		{
			current_layer->var_W[i][j]	= 0;				// No previous weight change yet (momentum term)
			current_layer->W[i][j]		= -1+0.25*j;		// Deterministic initial weights, spread upward from -1
		}
	}
	/************************************************************************/

	/************************************************************************/
	/* Set up the other characteristics of the layer						*/
	/************************************************************************/
	current_layer->layer_mode		=	mode;				// INPUT/HIDDEN/OUTPUT mode of this layer
	current_layer->nodes			=	nd;					// The number of nodes for this layer
	current_layer->momentum			=	Momentum;			// Momentum coefficient
	current_layer->learning_rate	=	Leaning_rate;		// Learning rate (param spelled "Leaning" to match declaration)
	current_layer->prev_layer		=	prev_Layer;			// The address of the previous layer
	/************************************************************************/
}

/************************************************************************/
/* SetInputvalue(Layer* current_layer, double *data)					*/
/* Name: SetInputvalue													*/
/* Parameter: Layer* current_layer - current layer address				*/
/*			  double *data - input data									*/
/* Return: Return true if success or return false if failed				*/
/* Explain: Set up the Y value for current layer						*/
/************************************************************************/
BOOL CPatternHW4Doc::SetInputvalue(Layer* current_layer, double *data)
{
	/************************************************************************/
	/* Copy the input sample into the input layer's Y array; only the		*/
	/* input layer may be fed directly.										*/
	/************************************************************************/
	if(current_layer->layer_mode != INPUTMODE)		// Guard: reject non-input layers
		return FALSE;

	const int n = current_layer->nodes;
	for(int k=0;k<n;k++)							// Copy each feature into the state array
		current_layer->Y[k] = data[k];
	current_layer->Y[n] = -1;						// Augmented (bias) input slot gets -1

	return TRUE;
	/************************************************************************/
}

/************************************************************************/
/* UpdateY(Layer* current_layer)										*/
/* Name: UpdateY														*/
/* Parameter: Layer* current_layer - current layer address				*/
/* Return: No															*/
/* Explain: Update the Y value for each iteration						*/
/************************************************************************/
void CPatternHW4Doc::UpdateY(Layer* current_layer)
{
	if(current_layer->layer_mode == INPUTMODE)			// Input layer is fed directly by SetInputvalue()
		return;

	Layer *prev = current_layer->prev_layer;
	for(int node=0;node<current_layer->nodes;node++){	// For every node in this layer...
		double net = 0;									// Weighted sum of previous-layer outputs
		for(int src=0;src<=prev->nodes;src++)			// ...accumulate over all inputs incl. the bias slot
			net += prev->Y[src] * current_layer->W[src][node];
		current_layer->Y[node] = Sigmoid(net);			// Unipolar sigmoidal activation
	}
	current_layer->Y[current_layer->nodes] = -1;		// Augmented (bias) output gets -1
}

/************************************************************************/
/* Backward(Layer* current_layer, double *Error)						*/
/* Name: Backward														*/
/* Parameter: Layer* current_layer - current layer address				*/
/*			  double *Error - Error vector								*/
/* Return: No															*/
/* Explain: Backward Propagation for output layer						*/
/************************************************************************/
void CPatternHW4Doc::Backward(Layer* current_layer, double *Error)
{
	/* Output-layer delta: delta_k = y_k * (1 - y_k) * e_k					*/
	/* (sigmoid derivative times the output error)							*/
	if(current_layer->layer_mode != OUTPUTMODE)		// This overload handles the output layer only
		return;

	for(int k=0;k<current_layer->nodes;k++){
		const double y = current_layer->Y[k];
		current_layer->delta[k] = y * (1 - y) * Error[k];
	}
}

/************************************************************************/
/* Backward(Layer* current_layer, Layer *next_layer)					*/
/* Name: Backward														*/
/* Parameter: Layer* current_layer - current layer address				*/
/*			  Layer *next_layer - next layer address					*/
/* Return: No															*/
/* Explain: Backward Propagation for hidden layers						*/
/************************************************************************/
void CPatternHW4Doc::Backward(Layer *current_layer, Layer *next_layer)
{
	if(current_layer->layer_mode == OUTPUTMODE)			// Output layer is handled by the other overload
		return;

	/* Hidden-layer delta: delta_i = y_i*(1-y_i)*Sum_k(delta_k(l+1)*W_ki(l+1)),	*/
	/* computed for every node of this layer including the bias slot.			*/
	for(int i=0;i<=current_layer->nodes;i++){
		double back_err = 0;							// Error propagated back from the next layer
		for(int k=0;k<next_layer->nodes;k++)
			back_err += next_layer->delta[k] * next_layer->W[i][k];
		const double y = current_layer->Y[i];
		current_layer->delta[i] = y * (1 - y) * back_err;
	}
}

/************************************************************************/
/* UpdateW(Layer *current_layer)										*/
/* Name: UpdateW														*/
/* Parameter: Layer* current_layer - current layer address				*/
/* Return: No															*/
/* Explain: Update the weight vector									*/
/************************************************************************/
void CPatternHW4Doc::UpdateW(Layer *current_layer)
{
	if(current_layer->layer_mode == INPUTMODE)			// Input layer has no incoming weights
		return;

	const int cols = current_layer->nodes + 1;					// Nodes of this layer incl. bias slot
	const int rows = current_layer->prev_layer->nodes + 1;		// Previous-layer outputs incl. bias

	/* W += learning_rate * delta * Y[prev] + momentum * previous change	*/
	for(int col=0;col<cols;col++){
		for(int row=0;row<rows;row++){
			const double previous = current_layer->W[row][col];	// Keep old value to measure the change
			const double gradient_step = current_layer->learning_rate
				* current_layer->delta[col]
				* current_layer->prev_layer->Y[row];
			const double momentum_step = current_layer->momentum
				* current_layer->var_W[row][col];
			current_layer->W[row][col] = previous + gradient_step + momentum_step;
			current_layer->var_W[row][col] = current_layer->W[row][col] - previous;	// Remember delta-W for the next momentum term
		}
	}
}

/************************************************************************************/
/* Test(int hidLay, double learnRate, int hidNode, double momen, double threshold)	*/
/* Name: Test																		*/
/* Parameter: int hidLay - the number of hidden layer								*/
/*			  double learnRate - learning rate										*/
/*			  int hidNode - The number of nodes in current layer					*/
/*			  double momen - momentum alpha value									*/
/*			  double threshold - threshold value									*/
/* Return: No																		*/
/* Explain: Test module																*/
/************************************************************************************/
void CPatternHW4Doc::Test(int hidLay, double learnRate, int hidNode, double momen, double threshold)
{
	int i,j;
	double Feed[4];					// Input feature vector. Stack storage replaces the original
									// `new double[4]` that was released with a mismatched scalar
									// `delete` (array new + scalar delete is undefined behavior).
	double err[3];

	MLP(hidLay, learnRate, hidNode, momen, threshold);			// Train the non-linear classifier first

	Normalize(TstData);											// Normalize test data like the training data

	for(i=0;i<75;i++) {
		/************************************************/
		/* 1. Feed input samples						*/
		/************************************************/
		Feed[0]=TstData[i].sepalLength;		// Sepal Length
		Feed[1]=TstData[i].sepalWidth;		// Sepal Width
		Feed[2]=TstData[i].petalLength;		// Petal Length
		Feed[3]=TstData[i].petalWidth;		// Petal Width
		SetInputvalue(&InputLayer,Feed);	// Set up the input value
		/************************************************/

		/************************************************/
		/* 2. Forward computation						*/
		/************************************************/
		for(int k=0;k<hidLay;k++) {
			UpdateY(&HiddenLayer[k]);	// Update each hidden layer in order
		}
		UpdateY(&OutputLayer);			// Update the output layer
		/************************************************/

		/************************************************/
		/* 3. Record classifier outputs					*/
		/************************************************/
		for(j=0;j<3;j++) {
			Test_Result[i][j]=OutputLayer.Y[j];
		}
		/************************************************/

		/************************************************/
		/* 4. Per-sample half sum-of-squared error		*/
		/*    (error and accumulation merged into one	*/
		/*    loop; same arithmetic as before)			*/
		/************************************************/
		m_SSE[i]=0;
		for(j=0;j<3;j++) {
			err[j]=TstData[i].CLdata[j]-OutputLayer.Y[j];
			m_SSE[i]+=(err[j]*err[j]/2.0);
		}
		/************************************************/
	}
}

/************************************************************************/
/* OnRegressinput()														*/
/* Name: OnRegressinput													*/
/* Parameter: No														*/
/* Return: No															*/
/* Explain: File Open for Regressor										*/
/************************************************************************/
void CPatternHW4Doc::OnRegressinput() 
{
	register int i, j;
	FILE *fp;
	char *szFilter = "ASC file (*.asc)|*.asc|All files (*.*)|*.*||";
	CFileDialog pDlg(TRUE,NULL,NULL,OFN_HIDEREADONLY|OFN_ALLOWMULTISELECT,szFilter);

	if(pDlg.DoModal() == IDOK){
		if(m_DataR == NULL){
			m_DataR = new float*[225];
			m_DataR[0] = new float[225*3];
			for(i=1;i<225;i++)	m_DataR[i] = m_DataR[i-1] + 3;
			memset(m_DataR[0],0,225*3*sizeof(float));	
		}
		fp=fopen(pDlg.GetPathName(), "rb");
		for(i=0;i<225;i++){
			for(j=0;j<3;j++){
				fscanf(fp,"%f",&m_DataR[i][j]);
			}
		}
		fclose(fp);

// NOTE: capture truncated here (page 1 of 3) -- OnRegressinput() continues on
// the next page. The web viewer's keyboard-shortcut help panel that appeared
// here was removed because it is not part of the source.