
📄 svm.cpp

📁 Ordinary support vector machine algorithms tend to be rather single-purpose
💻 CPP
📖 Page 1 of 3
		alpha2, param->C, param->C, param->eps, si, param->shrinking);

	double sum_alpha = 0;
	for(i=0;i<l;i++)
	{
		alpha[i] = alpha2[i] - alpha2[i+l];
		sum_alpha += fabs(alpha[i]);
	}
	info("nu = %f\n",sum_alpha/(param->C*l));

	delete[] alpha2;
	delete[] linear_term;
	delete[] y;
}

static void solve_nu_svr(
	const svm_problem *prob, const svm_parameter *param,
	double *alpha, Solver::SolutionInfo* si)
{
	if(param->nu < 0 || param->nu > 1)
	{
		fprintf(stderr,"specified nu is out of range\n");
		exit(1);
	}

	int l = prob->l;
	double C = param->C;
	double *alpha2 = new double[2*l];
	double *linear_term = new double[2*l];
	schar *y = new schar[2*l];
	int i;

	double sum = C * param->nu * l / 2;
	for(i=0;i<l;i++)
	{
		alpha2[i] = alpha2[i+l] = min(sum,C);
		sum -= alpha2[i];

		linear_term[i] = - prob->y[i];
		y[i] = 1;

		linear_term[i+l] = prob->y[i];
		y[i+l] = -1;
	}

	Solver_NU s;
	s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y,
		alpha2, C, C, param->eps, si, param->shrinking);

	info("epsilon = %f\n",-si->r);

	for(i=0;i<l;i++)
		alpha[i] = alpha2[i] - alpha2[i+l];

	delete[] alpha2;
	delete[] linear_term;
	delete[] y;
}

//
// decision_function
//
struct decision_function
{
	double *alpha;
	double rho;
};

decision_function svm_train_one(
	const svm_problem *prob, const svm_parameter *param,
	double Cp, double Cn)
{
	double *alpha = Malloc(double,prob->l);
	Solver::SolutionInfo si;
	switch(param->svm_type)
	{
		case C_SVC:
			solve_c_svc(prob,param,alpha,&si,Cp,Cn);
			break;
		case NU_SVC:
			solve_nu_svc(prob,param,alpha,&si);
			break;
		case ONE_CLASS:
			solve_one_class(prob,param,alpha,&si);
			break;
		case EPSILON_SVR:
			solve_epsilon_svr(prob,param,alpha,&si);
			break;
		case NU_SVR:
			solve_nu_svr(prob,param,alpha,&si);
			break;
	}

	info("obj = %f, rho = %f\n",si.obj,si.rho);

	// output SVs

	int nSV = 0;
	int nBSV = 0;
	for(int i=0;i<prob->l;i++)
	{
		if(fabs(alpha[i]) > 0)
		{
			++nSV;
			if(prob->y[i] > 0)
			{
				if(fabs(alpha[i]) >= si.upper_bound_p)
					++nBSV;
			}
			else
			{
				if(fabs(alpha[i]) >= si.upper_bound_n)
					++nBSV;
			}
		}
	}

	info("nSV = %d, nBSV = %d\n",nSV,nBSV);

	decision_function f;
	f.alpha = alpha;
	f.rho = si.rho;
	return f;
}

//
// svm_model
//
struct svm_model
{
	svm_parameter param;	// parameter
	int nr_class;		// number of classes, = 2 in regression/one class svm
	int l;			// total #SV
	svm_node **SV;		// SVs (SV[l])
	double **sv_coef;	// coefficients for SVs in decision functions (sv_coef[n-1][l])
	double *rho;		// constants in decision functions (rho[n*(n-1)/2])

	// for classification only

	int *label;		// label of each class (label[n])
	int *nSV;		// number of SVs for each class (nSV[n])
				// nSV[0] + nSV[1] + ... + nSV[n-1] = l
	// XXX
	int free_sv;		// 1 if svm_model is created by svm_load_model
				// 0 if svm_model is created by svm_train
};

//
// Interface functions
//
svm_model *svm_train(const svm_problem *prob, const svm_parameter *param)
{
	svm_model *model = Malloc(svm_model,1);
	model->param = *param;
	model->free_sv = 0;	// XXX

	if(param->svm_type == ONE_CLASS ||
	   param->svm_type == EPSILON_SVR ||
	   param->svm_type == NU_SVR)
	{
		// regression or one-class-svm
		model->nr_class = 2;
		model->label = NULL;
		model->nSV = NULL;
		model->sv_coef = Malloc(double *,1);
		decision_function f = svm_train_one(prob,param,0,0);
		model->rho = Malloc(double,1);
		model->rho[0] = f.rho;

		int nSV = 0;
		int i;
		for(i=0;i<prob->l;i++)
			if(fabs(f.alpha[i]) > 0) ++nSV;
		model->l = nSV;
		model->SV = Malloc(svm_node *,nSV);
		model->sv_coef[0] = Malloc(double,nSV);
		int j = 0;
		for(i=0;i<prob->l;i++)
			if(fabs(f.alpha[i]) > 0)
			{
				model->SV[j] = prob->x[i];
				model->sv_coef[0][j] = f.alpha[i];
				++j;
			}

		free(f.alpha);
	}
	else
	{
		// classification
		// find out the number of classes
		int l = prob->l;
		int max_nr_class = 16;
		int nr_class = 0;
		int *label = Malloc(int,max_nr_class);
		int *count = Malloc(int,max_nr_class);
		int *index = Malloc(int,l);

		int i;
		for(i=0;i<l;i++)
		{
			int this_label = (int)prob->y[i];
			int j;
			for(j=0;j<nr_class;j++)
				if(this_label == label[j])
				{
					++count[j];
					break;
				}
			index[i] = j;
			if(j == nr_class)
			{
				if(nr_class == max_nr_class)
				{
					max_nr_class *= 2;
					label = (int *)realloc(label,max_nr_class*sizeof(int));
					count = (int *)realloc(count,max_nr_class*sizeof(int));
				}
				label[nr_class] = this_label;
				count[nr_class] = 1;
				++nr_class;
			}
		}

		// group training data of the same class

		int *start = Malloc(int,nr_class);
		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+count[i-1];

		svm_node **x = Malloc(svm_node *,l);

		for(i=0;i<l;i++)
		{
			x[start[index[i]]] = prob->x[i];
			++start[index[i]];
		}

		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+count[i-1];

		// calculate weighted C

		double *weighted_C = Malloc(double, nr_class);
		for(i=0;i<nr_class;i++)
			weighted_C[i] = param->C;
		for(i=0;i<param->nr_weight;i++)
		{
			int j;
			for(j=0;j<nr_class;j++)
				if(param->weight_label[i] == label[j])
					break;
			if(j == nr_class)
				fprintf(stderr,"warning: class label %d specified in weight is not found\n", param->weight_label[i]);
			else
				weighted_C[j] *= param->weight[i];
		}

		// train n*(n-1)/2 models

		bool *nonzero = Malloc(bool,l);
		for(i=0;i<l;i++)
			nonzero[i] = false;
		decision_function *f = Malloc(decision_function,nr_class*(nr_class-1)/2);

		int p = 0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				svm_problem sub_prob;
				int si = start[i], sj = start[j];
				int ci = count[i], cj = count[j];
				sub_prob.l = ci+cj;
				sub_prob.x = Malloc(svm_node *,sub_prob.l);
				sub_prob.y = Malloc(double,sub_prob.l);
				int k;
				for(k=0;k<ci;k++)
				{
					sub_prob.x[k] = x[si+k];
					sub_prob.y[k] = +1;
				}
				for(k=0;k<cj;k++)
				{
					sub_prob.x[ci+k] = x[sj+k];
					sub_prob.y[ci+k] = -1;
				}

				f[p] = svm_train_one(&sub_prob,param,weighted_C[i],weighted_C[j]);
				for(k=0;k<ci;k++)
					if(!nonzero[si+k] && fabs(f[p].alpha[k]) > 0)
						nonzero[si+k] = true;
				for(k=0;k<cj;k++)
					if(!nonzero[sj+k] && fabs(f[p].alpha[ci+k]) > 0)
						nonzero[sj+k] = true;
				free(sub_prob.x);
				free(sub_prob.y);
				++p;
			}

		// build output

		model->nr_class = nr_class;

		model->label = Malloc(int,nr_class);
		for(i=0;i<nr_class;i++)
			model->label[i] = label[i];

		model->rho = Malloc(double,nr_class*(nr_class-1)/2);
		for(i=0;i<nr_class*(nr_class-1)/2;i++)
			model->rho[i] = f[i].rho;

		int total_sv = 0;
		int *nz_count = Malloc(int,nr_class);
		model->nSV = Malloc(int,nr_class);
		for(i=0;i<nr_class;i++)
		{
			int nSV = 0;
			for(int j=0;j<count[i];j++)
				if(nonzero[start[i]+j])
				{
					++nSV;
					++total_sv;
				}
			model->nSV[i] = nSV;
			nz_count[i] = nSV;
		}

		info("Total nSV = %d\n",total_sv);

		model->l = total_sv;
		model->SV = Malloc(svm_node *,total_sv);
		p = 0;
		for(i=0;i<l;i++)
			if(nonzero[i]) model->SV[p++] = x[i];

		int *nz_start = Malloc(int,nr_class);
		nz_start[0] = 0;
		for(i=1;i<nr_class;i++)
			nz_start[i] = nz_start[i-1]+nz_count[i-1];

		model->sv_coef = Malloc(double *,nr_class-1);
		for(i=0;i<nr_class-1;i++)
			model->sv_coef[i] = Malloc(double,total_sv);

		p = 0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				// classifier (i,j): coefficients with
				// i are in sv_coef[j-1][nz_start[i]...],
				// j are in sv_coef[i][nz_start[j]...]

				int si = start[i];
				int sj = start[j];
				int ci = count[i];
				int cj = count[j];

				int q = nz_start[i];
				int k;
				for(k=0;k<ci;k++)
					if(nonzero[si+k])
						model->sv_coef[j-1][q++] = f[p].alpha[k];
				q = nz_start[j];
				for(k=0;k<cj;k++)
					if(nonzero[sj+k])
						model->sv_coef[i][q++] = f[p].alpha[ci+k];
				++p;
			}

		free(label);
		free(count);
		free(index);
		free(start);
		free(x);
		free(weighted_C);
		free(nonzero);
		for(i=0;i<nr_class*(nr_class-1)/2;i++)
			free(f[i].alpha);
		free(f);
		free(nz_count);
		free(nz_start);
	}
	return model;
}

double svm_predict(const svm_model *model, const svm_node *x)
{
	if(model->param.svm_type == ONE_CLASS ||
	   model->param.svm_type == EPSILON_SVR ||
	   model->param.svm_type == NU_SVR)
	{
		double *sv_coef = model->sv_coef[0];
		double sum = 0;
		for(int i=0;i<model->l;i++)
			sum += sv_coef[i] * Kernel::k_function(x,model->SV[i],model->param);
		sum -= model->rho[0];
		if(model->param.svm_type == ONE_CLASS)
			return (sum>0)?1:-1;
		else
			return sum;
	}
	else
	{
		int i;
		int nr_class = model->nr_class;
		int l = model->l;

		double *kvalue = Malloc(double,l);
		for(i=0;i<l;i++)
			kvalue[i] = Kernel::k_function(x,model->SV[i],model->param);

		int *start = Malloc(int,nr_class);
		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+model->nSV[i-1];

		int *vote = Malloc(int,nr_class);
		for(i=0;i<nr_class;i++)
			vote[i] = 0;
		int p=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				double sum = 0;
				int si = start[i];
				int sj = start[j];
				int ci = model->nSV[i];
				int cj = model->nSV[j];

				int k;
				double *coef1 = model->sv_coef[j-1];
				double *coef2 = model->sv_coef[i];
				for(k=0;k<ci;k++)
					sum += coef1[si+k] * kvalue[si+k];
				for(k=0;k<cj;k++)
					sum += coef2[sj+k] * kvalue[sj+k];
				sum -= model->rho[p++];
				if(sum > 0)
					++vote[i];
				else
					++vote[j];
			}

		int vote_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(vote[i] > vote[vote_max_idx])
				vote_max_idx = i;
		free(kvalue);
		free(start);
		free(vote);
		return model->label[vote_max_idx];
	}
}

const char *svm_type_table[] =
{
	"c_svc","nu_svc","one_class","epsilon_svr","nu_svr",NULL
};

const char *kernel_type_table[]=
{
	"linear","polynomial","rbf","sigmoid",NULL
};

int svm_save_model(const char *model_file_name, const svm_model *model)
{
	FILE *fp = fopen(model_file_name,"w");
	if(fp==NULL) return -1;

	const svm_parameter& param = model->param;

	fprintf(fp,"svm_type %s\n", svm_type_table[param.svm_type]);
fprintf(fp,"kernel_type %s\n", kernel_type_table[param.kernel_type]);	if(param.kernel_type == POLY)		fprintf(fp,"degree %g\n", param.degree);	if(param.kernel_type == POLY || param.kernel_type == RBF || param.kernel_type == SIGMOID)		fprintf(fp,"gamma %g\n", param.gamma);	if(param.kernel_type == POLY || param.kernel_type == SIGMOID)		fprintf(fp,"coef0 %g\n", param.coef0);	int nr_class = model->nr_class;	int l = model->l;	fprintf(fp, "nr_class %d\n", nr_class);	fprintf(fp, "total_sv %d\n",l);		{		fprintf(fp, "rho");		for(int i=0;i<nr_class*(nr_class-1)/2;i++)			fprintf(fp," %g",model->rho[i]);		fprintf(fp, "\n");	}		if(model->label)	{		fprintf(fp, "label");		for(int i=0;i<nr_class;i++)			fprintf(fp," %d",model->label[i]);		fprintf(fp, "\n");	}	if(model->nSV)	{		fprintf(fp, "nr_sv");		for(int i=0;i<nr_class;i++)			fprintf(fp," %d",model->nSV[i]);		fprintf(fp, "\n");	}	fprintf(fp, "SV\n");	const double * const *sv_coef = model->sv_coef;	const svm_node * const *SV = model->SV;	for(int i=0;i<l;i++)	{		for(int j=0;j<nr_class-1;j++)			fprintf(fp, "%.16g ",sv_coef[j][i]);		const svm_node *p = SV[i];		while(p->index != -1)		{			fprintf(fp,"%d:%.8g ",p->index,p->value);			p++;		}		fprintf(fp, "\n");	}	fclose(fp);	return 0;}svm_model *svm_load_model(const char *model_file_name){	FILE *fp = fopen(model_file_name,"rb");	if(fp==NULL) return NULL;		// read parameters	svm_model *model = (svm_model *)malloc(sizeof(svm_model));	svm_parameter& param = model->param;	model->label = NULL;	model->nSV = NULL;	char cmd[81];	while(1)	{		fscanf(fp,"%80s",cmd);		if(strcmp(cmd,"svm_type")==0)		{			fscanf(fp,"%80s",cmd);			int i;			for(i=0;svm_type_table[i];i++)			{				if(strcmp(svm_type_table[i],cmd)==0)				{					param.svm_type=i;					break;				}			}			if(svm_type_table[i] == NULL)			{				fprintf(stderr,"unknown svm type.\n");				exit(1);			}		}		else if(strcmp(cmd,"kernel_type")==0)		{					fscanf(fp,"%80s",cmd);			int i;			for(i=0;kernel_type_table[i];i++)			{				if(strcmp(kernel_type_table[i],cmd)==0)				{					param.kernel_type=i;					break;				}			}			if(kernel_type_table[i] == NULL)			{				fprintf(stderr,"unknown kernel function.\n");				exit(1);			}		}		else if(strcmp(cmd,"degree")==0)			fscanf(fp,"%lf",&param.degree);		else if(strcmp(cmd,"gamma")==0)			fscanf(fp,"%lf",&param.gamma);		else if(strcmp(cmd,"coef0")==0)			fscanf(fp,"%lf",&param.coef0);		else if(strcmp(cmd,"nr_class")==0)			fscanf(fp,"%d",&model->nr_class);		else if(strcmp(cmd,"total_sv")==0)			fscanf(fp,"%d",&model->l);		else if(strcmp(cmd,"rho")==0)		{			int n = model->nr_class * (model->nr_class-1)/2;			model->rho = Malloc(double,n);			for(int i=0;i<n;i++)				fscanf(fp,"%lf",&model->rho[i]);		}		else if(strcmp(cmd,"label")==0)		{			int n = model->nr_class;			model->label = Malloc(int,n);			for(int i=0;i<n;i++)				fscanf(fp,"%d",&model->label[i]);		}		else if(strcmp(cmd,"nr_sv")==0)		{			int n = model->nr_class;			model->nSV = Malloc(int,n);			for(int i=0;i<n;i++)				fscanf(fp,"%d",&model->nSV[i]);		}		else if(strcmp(cmd,"SV")==0)		{			while(1)			{				int c = getc(fp);				if(c==EOF || c=='\n') break;				}			break;		}		else		{			fprintf(stderr,"unknown text in model file\n");			exit(1);		}	}	// read sv_coef and SV	int elements = 0;	long pos = ftell(fp);	while(1)	{		int c = fgetc(fp);		switch(c)		{			case '\n':				// count the '-1' element			case ':':				++elements;				break;			case EOF:				goto out;			default:				;		}	}out:	fseek(fp,pos,SEEK_SET);	int m = model->nr_class - 1;	int l = model->l;	model->sv_coef = Malloc(double *,m);	int i;	
	for(i=0;i<m;i++)
		model->sv_coef[i] = Malloc(double,l);
	model->SV = Malloc(svm_node*,l);
	svm_node *x_space = Malloc(svm_node,elements);

	int j=0;
	for(i=0;i<l;i++)
	{
		model->SV[i] = &x_space[j];
		for(int k=0;k<m;k++)
			fscanf(fp,"%lf",&model->sv_coef[k][i]);
		while(1)
		{
			int c;
			do {
				c = getc(fp);
				if(c=='\n') goto out2;
			} while(isspace(c));
			ungetc(c,fp);
			fscanf(fp,"%d:%lf",&(x_space[j].index),&(x_space[j].value));
			++j;
		}
out2:
		x_space[j++].index = -1;
	}

	fclose(fp);

	model->free_sv = 1;	// XXX
	return model;
}

void svm_destroy_model(svm_model* model)
{
	if(model->free_sv)
		free((void *)(model->SV[0]));
	for(int i=0;i<model->nr_class-1;i++)
		free(model->sv_coef[i]);
	free(model->SV);
	free(model->sv_coef);
	free(model->rho);
	free(model->label);
	free(model->nSV);
	free(model);
}
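The interface functions listed above (svm_train, svm_predict, svm_save_model, svm_load_model, svm_destroy_model) are the entry points a caller works with. Below is a minimal usage sketch, not part of svm.cpp itself: it assumes the svm.h header that normally accompanies this file, with svm_problem, svm_parameter and svm_node declared as they are used in the code above; parameter fields not visible in this listing (cache_size, p) and the file name demo.model are assumptions for the example.

// Minimal usage sketch (illustrative only, not part of svm.cpp).
// Assumes the svm.h that ships with this version of LIBSVM.
#include <cstdio>
#include "svm.h"

int main()
{
	// Two 2-D training points in sparse svm_node form; index -1 terminates a vector.
	svm_node x0[] = { {1, 0.0}, {2, 0.0}, {-1, 0.0} };
	svm_node x1[] = { {1, 1.0}, {2, 1.0}, {-1, 0.0} };
	svm_node *x[] = { x0, x1 };
	double y[] = { +1, -1 };

	svm_problem prob;
	prob.l = 2;
	prob.x = x;
	prob.y = y;

	svm_parameter param;
	param.svm_type = C_SVC;
	param.kernel_type = RBF;
	param.degree = 3;          // used only by POLY
	param.gamma = 0.5;
	param.coef0 = 0;
	param.C = 1;
	param.eps = 1e-3;
	param.nu = 0.5;            // used only by NU_SVC/NU_SVR/ONE_CLASS
	param.p = 0.1;             // used only by EPSILON_SVR (assumed field)
	param.cache_size = 40;     // kernel cache size in MB (assumed field)
	param.shrinking = 1;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;

	svm_model *model = svm_train(&prob, &param);

	svm_node test[] = { {1, 0.9}, {2, 0.8}, {-1, 0.0} };
	printf("predicted label: %g\n", svm_predict(model, test));

	svm_save_model("demo.model", model);   // hypothetical file name
	svm_destroy_model(model);
	return 0;
}

The saved file is the plain-text format produced by svm_save_model above: header lines (svm_type, kernel_type, gamma, nr_class, total_sv, rho, plus label and nr_sv for classification), then an SV section with one support vector per line, coefficients first and index:value pairs after. svm_load_model parses exactly this layout back into an svm_model, setting free_sv to 1 so that svm_destroy_model knows it owns the support-vector storage.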
