
📄 lineardiscriminant.cpp

📁 Implementation of the linear discriminant analysis algorithm and the k-nearest-neighbor (KNN) algorithm
💻 CPP
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <fstream>
using namespace std;
#define TOTAL_NUMBER 50//total number of samples per digit
#define TRAINING_NUMBER 30//number of samples used for training
#define TESTING_NUMBER 20//number of samples used for testing
#define BITMAP_SIZE 64//bitmap size
#define FEATURE_NUMBER 16//number of extracted features (no value given in the original; 16 is a placeholder)
#define START_VALUE 0//initial value of every component of w (no value given in the original; 0 is a placeholder)
#define STEP_LENGTH 5//step length

char map[BITMAP_SIZE][BITMAP_SIZE];//bitmap
float feature[FEATURE_NUMBER+2];//feature[0] is the digit label, feature[1]..feature[FEATURE_NUMBER] are the features, and the last element is the augmentation bit (always 1)
int w0[FEATURE_NUMBER+1],w1[FEATURE_NUMBER+1],w4[FEATURE_NUMBER+1],w5[FEATURE_NUMBER+1],w6[FEATURE_NUMBER+1],w7[FEATURE_NUMBER+1],w8[FEATURE_NUMBER+1],w9[FEATURE_NUMBER+1];//discriminant-function weight vectors (8 classes, one vector per digit)

void FeatureExtraction(int i)//process the map array, extract the features and store them in feature (not implemented; see the sketch below)
{
}
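
// ---------------------------------------------------------------------------
// The function above is empty in the original file. The sketch below is one
// hedged, illustrative way it could be filled in, assuming FEATURE_NUMBER==16
// and that foreground pixels in map are stored as the character '1': the 64x64
// bitmap is split into a 4x4 grid of 16x16 blocks and each feature is the
// foreground-pixel count of one block. The name and the pixel encoding are
// assumptions, not the author's method.
void FeatureExtractionSketch(int digit)
{
	int block=BITMAP_SIZE/4;//16x16-pixel blocks
	feature[0]=(float)digit;//label: the digit this sample represents
	for(int by=0;by<4;by++)
		for(int bx=0;bx<4;bx++){
			int count=0;
			for(int y=by*block;y<(by+1)*block;y++)
				for(int x=bx*block;x<(bx+1)*block;x++)
					if(map[y][x]=='1')//assumed foreground encoding
						count++;
			feature[1+by*4+bx]=(float)count;
		}
	feature[FEATURE_NUMBER+1]=1;//augmentation bit
}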

void training()
{
	int i,j,f;
	float g,h,sum;
	int get_figure;

	f=0;
	while(f<8*TRAINING_NUMBER){
		f=0;
		for(i=1;i<=8*TRAINING_NUMBER;i++){
			//read one sample from training_feature.txt into the feature array
			//(not implemented here; a hedged reading sketch follows this function)
			sum=0;
			get_figure=0;
			h=0;
			for(j=1;j<=FEATURE_NUMBER+1;j++){
				g=feature[j]*w0[j-1];
			    h=h+g;
			}
			if(h>sum){
				sum=h;
				get_figure=0;
			}

			h=0;
			for(j=1;j<=FEATURE_NUMBER+1;j++){
				g=feature[j]*w1[j-1];
			    h=h+g;
			}
			if(h>sum){
				sum=h;
				get_figure=1;
			}

			h=0;
			for(j=1;j<=FEATURE_NUMBER+1;j++){
				g=feature[j]*w4[j-1];
			    h=h+g;
			}
			if(h>sum){
				sum=h;
				get_figure=4;
			}

			h=0;
			for(j=1;j<=FEATURE_NUMBER+1;j++){
				g=feature[j]*w5[j-1];
			    h=h+g;
			}
			if(h>sum){
				sum=h;
				get_figure=5;
			}

			h=0;
			for(j=1;j<=FEATURE_NUMBER+1;j++){
				g=feature[j]*w6[j-1];
			    h=h+g;
			}
			if(h>sum){
				sum=h;
				get_figure=6;
			}

			h=0;
			for(j=1;j<=FEATURE_NUMBER+1;j++){
				g=feature[j]*w7[j-1];
			    h=h+g;
			}
			if(h>sum){
				sum=h;
				get_figure=7;
			}

			h=0;
			for(j=1;j<=FEATURE_NUMBER+1;j++){
				g=feature[j]*w8[j-1];
			    h=h+g;
			}
			if(h>sum){
				sum=h;
				get_figure=8;
			}

			h=0;
			for(j=1;j<=FEATURE_NUMBER+1;j++){
				g=feature[j]*w9[j-1];
			    h=h+g;
			}
			if(h>sum){
				sum=h;
				get_figure=9;
			}

			if(get_figure!=(int)feature[0]){
				//misclassified: strengthen the discriminant function of the class the
				//sample actually belongs to (see step 3 of the training notes in main)
				switch((int)feature[0])
				{
				case 0:
					for(j=0;j<=FEATURE_NUMBER;j++)
						w0[j]=w0[j]+feature[j+1]*STEP_LENGTH;
					break;
				case 1:
					for(j=0;j<=FEATURE_NUMBER;j++)
						w1[j]=w1[j]+feature[j+1]*STEP_LENGTH;
					break;
				case 4:
					for(j=0;j<=FEATURE_NUMBER;j++)
						w4[j]=w4[j]+feature[j+1]*STEP_LENGTH;
					break;
				case 5:
					for(j=0;j<=FEATURE_NUMBER;j++)
						w5[j]=w5[j]+feature[j+1]*STEP_LENGTH;
					break;
				case 6:
					for(j=0;j<=FEATURE_NUMBER;j++)
						w6[j]=w6[j]+feature[j+1]*STEP_LENGTH;
					break;
				case 7:
					for(j=0;j<=FEATURE_NUMBER;j++)
						w7[j]=w7[j]+feature[j+1]*STEP_LENGTH;
					break;
				case 8:
					for(j=0;j<=FEATURE_NUMBER;j++)
						w8[j]=w8[j]+feature[j+1]*STEP_LENGTH;
					break;
				case 9:
					for(j=0;j<=FEATURE_NUMBER;j++)
						w9[j]=w9[j]+feature[j+1]*STEP_LENGTH;
					break;
				default:break;
				}
			}
			else
				f++;
		}
	}
	return;
}
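
// ---------------------------------------------------------------------------
// The "read one sample" steps in training() and testing() are not implemented
// in the original file. The helper below is a hedged sketch of how one feature
// vector could be read from training_feature.txt or testing_feature.txt,
// assuming the format written in main(): FEATURE_NUMBER+2 space-separated
// numbers per line (label, features, augmentation bit). The name and signature
// are illustrative. training() would additionally need to open the stream once
// and rewind it (clear() followed by seekg(0)) at the start of every pass.
bool ReadSampleSketch(ifstream &in)
{
	for(int j=0;j<=FEATURE_NUMBER+1;j++)
		if(!(in>>feature[j]))//stops at end of file or on a malformed value
			return false;
	return true;
}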

void testing()
{
	int i,j;
	float g,h,sum;
	int get_figure;

	for(i=1;i<=8*TESTING_NUMBER;i++){
		//read one sample from testing_feature.txt into the feature array
		//(not implemented here; see the ReadSampleSketch helper above)
		sum=0;
		get_figure=0;
		h=0;
		for(j=1;j<=FEATURE_NUMBER+1;j++){
			g=feature[j]*w0[j-1];
		    h=h+g;
		}
		if(h>sum){
			sum=h;
			get_figure=0;
		}

		h=0;
		for(j=1;j<=FEATURE_NUMBER+1;j++){
			g=feature[j]*w1[j-1];
		    h=h+g;
		}
		if(h>sum){
			sum=h;
			get_figure=1;
		}

		h=0;
		for(j=1;j<=FEATURE_NUMBER+1;j++){
			g=feature[j]*w4[j-1];
		    h=h+g;
		}
		if(h>sum){
			sum=h;
			get_figure=4;
		}

		h=0;
		for(j=1;j<=FEATURE_NUMBER+1;j++){
			g=feature[j]*w5[j-1];
		    h=h+g;
		}
		if(h>sum){
			sum=h;
			get_figure=5;
		}

		h=0;
		for(j=1;j<=FEATURE_NUMBER+1;j++){
			g=feature[j]*w6[j-1];
		    h=h+g;
		}
		if(h>sum){
			sum=h;
			get_figure=6;
		}

		h=0;
		for(j=1;j<=FEATURE_NUMBER+1;j++){
			g=feature[j]*w7[j-1];
		    h=h+g;
		}
		if(h>sum){
			sum=h;
			get_figure=7;
		}

		h=0;
		for(j=1;j<=FEATURE_NUMBER+1;j++){
			g=feature[j]*w8[j-1];
		    h=h+g;
		}
		if(h>sum){
			sum=h;
			get_figure=8;
		}

		h=0;
		for(j=1;j<=FEATURE_NUMBER+1;j++){
			g=feature[j]*w9[j-1];
		    h=h+g;
		}
		if(h>sum){
			sum=h;
			get_figure=9;
		}
		cout<<feature[0]<<" "<<get_figure<<"\n";//true label vs. recognized digit
	}
}

int main()
{
	int i,j;
//---------------------- Feature extraction ---------------------
//1. Choose the features.
//2. Read 8 input files (one file per digit, 50 samples each) and write two output files:
//   one with the 8*30 training feature vectors, the other with the 8*20 testing feature vectors.
	ifstream inf;
	ofstream training_outf,testing_outf;
    training_outf.open("training_feature.txt");
	testing_outf.open("testing_feature.txt");
	for(i=0;i<=9;i++){
		switch(i)
		{
		case 0:inf.open("digit0.dat");break;
		case 1:inf.open("digit1.dat");break;
		case 4:inf.open("digit4.dat");break;
		case 5:inf.open("digit5.dat");break;
		case 6:inf.open("digit6.dat");break;
		case 7:inf.open("digit7.dat");break;
		case 8:inf.open("digit8.dat");break;
		case 9:inf.open("digit9.dat");break;
		default:continue;//digits 2 and 3 have no data files
		}

		char c;
		inf>>noskipws;//do not skip whitespace, so the '\n' at the end of each bitmap line is read too
		int sample_number=0;//samples processed so far: the first 30 go to training_feature.txt, the last 20 to testing_feature.txt
		int j;
		float d;

		while(inf>>c){
			int s=0,t=0;
			int stopline=0;

			//assuming the file holds only the bitmap characters, the character just
			//read by the outer loop belongs to the bitmap and must not be dropped
			if(c!='\n'){
				map[s][t]=c;
				t++;
			}
			else{
				s++;
				stopline++;
			}
			while((stopline!=BITMAP_SIZE)&&(inf>>c)){
				if(c!='\n'){
					map[s][t]=c;
					t++;
				}
                else{
                    s++;
					t=0;
					stopline++;
				}
			}
			sample_number++;
			//process the map array: extract the features into feature, then append
			//feature to training_feature.txt or testing_feature.txt
			FeatureExtraction(i);//i is the digit this bitmap represents
			if(sample_number<=TRAINING_NUMBER){
				for(j=0;j<=FEATURE_NUMBER+1;j++){
					d=feature[j];
				    training_outf<<d;
					if(j<FEATURE_NUMBER+1)
						training_outf<<" ";
					else
						training_outf<<"\n";
				}
			}
			else{
				for(j=0;j<=FEATURE_NUMBER+1;j++){
					d=feature[j];
				    testing_outf<<d;
				    if(j<FEATURE_NUMBER+1)
						testing_outf<<" ";
					else
						testing_outf<<"\n";
				}
			}
		}
		inf.close();
		inf.clear();//reset the stream state so it can be reopened for the next digit file
	}
	training_outf.close();
	testing_outf.close();

//---------------------- Training -------------------------
//1. Choose a step length STEP_LENGTH.
//2. Give every discriminant function (8 classes, 8 functions) an initial coefficient
//   vector w; its dimension = number of features + 1 (for the constant term).
//3. Train the discriminant functions with the training samples. At each step one sample
//   is fed to all 8 functions, the outputs are computed, and the sample is assigned to
//   the class with the largest output. If the classification is correct, no coefficient
//   vector w is changed; if it is wrong, the w of the class the sample should belong to
//   is updated: each component is increased by (corresponding sample component * STEP_LENGTH).
//4. Repeat, using all 8*30 training samples in every pass, until a whole pass leaves the
//   coefficients of all 8 discriminant functions unchanged.
	for(i=0;i<=FEATURE_NUMBER;i++){
		w0[i]=START_VALUE;
		w1[i]=START_VALUE;
		w4[i]=START_VALUE;
		w5[i]=START_VALUE;
		w6[i]=START_VALUE;
		w7[i]=START_VALUE;
		w8[i]=START_VALUE;
		w9[i]=START_VALUE;
	}
	training();

//---------------------- Testing -------------------------
//1. Feed each test sample to all 8 discriminant functions, compute the outputs,
//   and classify it as the class with the largest output.
//2. Repeat for every test sample, printing the true label next to the result.
	testing();

	system("PAUSE");
	return 0;
}
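
// ---------------------------------------------------------------------------
// The page description mentions a k-nearest-neighbor (KNN) classifier, but this
// file contains no KNN code. The sketch below is a hedged illustration of how a
// KNN classifier could work on the same feature vectors: it keeps all training
// vectors in memory (at most 8*TRAINING_NUMBER of them) and classifies a sample
// by majority vote among the k closest vectors under squared Euclidean distance.
// The names train_set and KnnClassifySketch are illustrative, not the author's.
float train_set[8*TRAINING_NUMBER][FEATURE_NUMBER+2];//label + features + augmentation bit

int KnnClassifySketch(const float x[],int n,int k)
{
	float dst[8*TRAINING_NUMBER];
	//squared Euclidean distance from x to every stored training vector
	for(int i=0;i<n;i++){
		dst[i]=0;
		for(int j=1;j<=FEATURE_NUMBER;j++){//skip the label and the augmentation bit
			float diff=x[j]-train_set[i][j];
			dst[i]+=diff*diff;
		}
	}
	//pick the k closest vectors one at a time and let their labels vote
	int votes[10]={0};
	for(int m=0;m<k&&m<n;m++){
		int nearest=-1;
		for(int i=0;i<n;i++)
			if(dst[i]>=0&&(nearest<0||dst[i]<dst[nearest]))
				nearest=i;
		votes[(int)train_set[nearest][0]]++;
		dst[nearest]=-1;//mark this neighbour as already used
	}
	int best=0;
	for(int c=1;c<10;c++)
		if(votes[c]>votes[best])
			best=c;
	return best;
}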
