⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 svm.cpp

📁 Language, Script, and Encoding Identification with String Kernel Classifiers
💻 CPP
📖 第 1 页 / 共 5 页
字号:
// NOTE(review): this opening brace continues a function whose signature lies
// above this chunk — from the call site below it is
// svm_group_classes(prob, &nr_class_ret, &label_ret, &start_ret, &count_ret, perm)
// (groups training samples by class label; TODO confirm exact prototype upstream).
// Outputs: number of distinct labels, the label values, per-class start offsets,
// per-class counts, and a permutation `perm` that orders samples class-by-class.
// Ownership: label/start/count are heap-allocated here and owned by the caller.
{
	int l = prob->l;
	int max_nr_class = 16;		// initial capacity of label/count; doubled on demand
	int nr_class = 0;
	int *label = Malloc(int,max_nr_class);
	int *count = Malloc(int,max_nr_class);
	int *data_label = Malloc(int,l);	// class index (0..nr_class-1) of each sample
	int i;

	// First pass: discover distinct labels and count members of each class.
	for(i=0;i<l;i++)
	{
		int this_label = (int)prob->y[i];
		int j;
		for(j=0;j<nr_class;j++)		// linear scan over classes seen so far
		{
			if(this_label == label[j])
			{
				++count[j];
				break;
			}
		}
		data_label[i] = j;		// j == nr_class here means "new label"
		if(j == nr_class)
		{
			if(nr_class == max_nr_class)	// grow both arrays together
			{
				max_nr_class *= 2;
				label = (int *)realloc(label,max_nr_class*sizeof(int));
				count = (int *)realloc(count,max_nr_class*sizeof(int));
			}
			label[nr_class] = this_label;
			count[nr_class] = 1;
			++nr_class;
		}
	}

	// start[c] = prefix sum of counts: offset of class c inside perm.
	int *start = Malloc(int,nr_class);
	start[0] = 0;
	for(i=1;i<nr_class;i++)
		start[i] = start[i-1]+count[i-1];
	// Scatter sample indices into perm so samples of each class are contiguous.
	// This pass consumes start[] as a moving cursor...
	for(i=0;i<l;i++)
	{
		perm[start[data_label[i]]] = i;
		++start[data_label[i]];
	}
	// ...so the prefix sums are rebuilt before being handed back.
	start[0] = 0;
	for(i=1;i<nr_class;i++)
		start[i] = start[i-1]+count[i-1];

	*nr_class_ret = nr_class;
	*label_ret = label;
	*start_ret = start;
	*count_ret = count;
	free(data_label);
}

//
// Interface functions
//

// Train an SVM model on `prob` with hyperparameters `param`.
// Regression / one-class types train a single decision function; classification
// types (C_SVC / NU_SVC) train one-vs-one binary machines for every class pair.
// Returns a heap-allocated svm_model owned by the caller (free_sv == 0: the
// model borrows the caller's svm_node vectors rather than owning them).
svm_model *svm_train(const svm_problem *prob, const svm_parameter *param)
{
	svm_model *model = Malloc(svm_model,1);
	model->param = *param;
	model->free_sv = 0;	// XXX
	if(param->svm_type == ONE_CLASS ||
	   param->svm_type == EPSILON_SVR ||
	   param->svm_type == NU_SVR)
	{
		// regression or one-class-svm
		model->nr_class = 2;
		model->label = NULL;
		model->nSV = NULL;
		model->probA = NULL; model->probB = NULL;
		model->sv_coef = Malloc(double *,1);	// single coefficient row
		if(param->probability && 
		   (param->svm_type == EPSILON_SVR ||
		    param->svm_type == NU_SVR))
		{
			// Laplace-parameter estimate for SVR probability inference.
			model->probA = Malloc(double,1);
			model->probA[0] = svm_svr_probability(prob,param);
		}

		decision_function f = svm_train_one(prob,param,0,0);
		model->rho = Malloc(double,1);
		model->rho[0] = f.rho;

		// Keep only samples with nonzero alpha as support vectors.
		int nSV = 0;
		int i;
		for(i=0;i<prob->l;i++)
			if(fabs(f.alpha[i]) > 0) ++nSV;
		model->l = nSV;
		model->SV = Malloc(svm_node *,nSV);
		model->sv_coef[0] = Malloc(double,nSV);
		int j = 0;
		for(i=0;i<prob->l;i++)
			if(fabs(f.alpha[i]) > 0)
			{
				model->SV[j] = prob->x[i];	// borrows caller's vectors (free_sv == 0)
				model->sv_coef[0][j] = f.alpha[i];
				++j;
			}

		free(f.alpha);
	}
	else
	{
		// classification
		int l = prob->l;
		int nr_class;
		int *label = NULL;
		int *start = NULL;
		int *count = NULL;
		int *perm = Malloc(int,l);

		// group training data of the same class
		svm_group_classes(prob,&nr_class,&label,&start,&count,perm);

		// x = training vectors reordered so each class is contiguous.
		svm_node **x = Malloc(svm_node *,l);
		int i;
		for(i=0;i<l;i++)
			x[i] = prob->x[perm[i]];

		// calculate weighted C
		// (per-class C = param->C scaled by any user-supplied class weights)
		double *weighted_C = Malloc(double, nr_class);
		for(i=0;i<nr_class;i++)
			weighted_C[i] = param->C;
		for(i=0;i<param->nr_weight;i++)
		{
			int j;
			for(j=0;j<nr_class;j++)
				if(param->weight_label[i] == label[j])
					break;
			if(j == nr_class)
				fprintf(stderr,"warning: class label %d specified in weight is not found\n", param->weight_label[i]);
			else
				weighted_C[j] *= param->weight[i];
		}

		// train k*(k-1)/2 models
		// nonzero[i] marks samples that end up as SVs in ANY pairwise machine.
		bool *nonzero = Malloc(bool,l);
		for(i=0;i<l;i++)
			nonzero[i] = false;
		decision_function *f = Malloc(decision_function,nr_class*(nr_class-1)/2);

		double *probA=NULL,*probB=NULL;
		if (param->probability)
		{
			probA=Malloc(double,nr_class*(nr_class-1)/2);
			probB=Malloc(double,nr_class*(nr_class-1)/2);
		}

		// p indexes class pairs (i,j), i<j, in row-major order.
		int p = 0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				// Build the two-class subproblem: class i labelled +1, class j labelled -1.
				svm_problem sub_prob;
				int si = start[i], sj = start[j];
				int ci = count[i], cj = count[j];
				sub_prob.l = ci+cj;
				sub_prob.x = Malloc(svm_node *,sub_prob.l);
				sub_prob.y = Malloc(double,sub_prob.l);
				int k;
				for(k=0;k<ci;k++)
				{
					sub_prob.x[k] = x[si+k];
					sub_prob.y[k] = +1;
				}
				for(k=0;k<cj;k++)
				{
					sub_prob.x[ci+k] = x[sj+k];
					sub_prob.y[ci+k] = -1;
				}

				if(param->probability)
					// Fit sigmoid parameters (Platt scaling) for this pair.
					svm_binary_svc_probability(&sub_prob,param,weighted_C[i],weighted_C[j],probA[p],probB[p]);

				f[p] = svm_train_one(&sub_prob,param,weighted_C[i],weighted_C[j]);
				// Record which original samples became support vectors.
				for(k=0;k<ci;k++)
					if(!nonzero[si+k] && fabs(f[p].alpha[k]) > 0)
						nonzero[si+k] = true;
				for(k=0;k<cj;k++)
					if(!nonzero[sj+k] && fabs(f[p].alpha[ci+k]) > 0)
						nonzero[sj+k] = true;
				free(sub_prob.x);
				free(sub_prob.y);
				++p;
			}

		// build output
		model->nr_class = nr_class;

		model->label = Malloc(int,nr_class);
		for(i=0;i<nr_class;i++)
			model->label[i] = label[i];

		model->rho = Malloc(double,nr_class*(nr_class-1)/2);
		for(i=0;i<nr_class*(nr_class-1)/2;i++)
			model->rho[i] = f[i].rho;

		if(param->probability)
		{
			model->probA = Malloc(double,nr_class*(nr_class-1)/2);
			model->probB = Malloc(double,nr_class*(nr_class-1)/2);
			for(i=0;i<nr_class*(nr_class-1)/2;i++)
			{
				model->probA[i] = probA[i];
				model->probB[i] = probB[i];
			}
		}
		else
		{
			model->probA=NULL;
			model->probB=NULL;
		}

		// Count SVs per class and in total.
		int total_sv = 0;
		int *nz_count = Malloc(int,nr_class);
		model->nSV = Malloc(int,nr_class);
		for(i=0;i<nr_class;i++)
		{
			int nSV = 0;
			for(int j=0;j<count[i];j++)
				if(nonzero[start[i]+j])
				{
					++nSV;
					++total_sv;
				}
			model->nSV[i] = nSV;
			nz_count[i] = nSV;
		}

		info("Total nSV = %d\n",total_sv);

		model->l = total_sv;
		model->SV = Malloc(svm_node *,total_sv);
		p = 0;
		for(i=0;i<l;i++)
			if(nonzero[i]) model->SV[p++] = x[i];	// SVs kept in class order

		// nz_start[c] = offset of class c's SVs within model->SV.
		int *nz_start = Malloc(int,nr_class);
		nz_start[0] = 0;
		for(i=1;i<nr_class;i++)
			nz_start[i] = nz_start[i-1]+nz_count[i-1];

		model->sv_coef = Malloc(double *,nr_class-1);
		for(i=0;i<nr_class-1;i++)
			model->sv_coef[i] = Malloc(double,total_sv);

		p = 0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				// classifier (i,j): coefficients with
				// i are in sv_coef[j-1][nz_start[i]...],
				// j are in sv_coef[i][nz_start[j]...]

				int si = start[i];
				int sj = start[j];
				int ci = count[i];
				int cj = count[j];

				int q = nz_start[i];
				int k;
				for(k=0;k<ci;k++)
					if(nonzero[si+k])	// copy only alphas belonging to kept SVs
						model->sv_coef[j-1][q++] = f[p].alpha[k];
				q = nz_start[j];
				for(k=0;k<cj;k++)
					if(nonzero[sj+k])
						model->sv_coef[i][q++] = f[p].alpha[ci+k];
				++p;
			}

		free(label);
		free(probA);	// free(NULL) is a no-op when probability was off
		free(probB);
		free(count);
		free(perm);
		free(start);
		free(x);
		free(weighted_C);
		free(nonzero);
		for(i=0;i<nr_class*(nr_class-1)/2;i++)
			free(f[i].alpha);
		free(f);
		free(nz_count);
		free(nz_start);
	}
	return model;
}

// Stratified cross validation
// Splits prob into nr_fold folds (class-stratified for C_SVC/NU_SVC, plain
// random otherwise), trains on each complement, and writes the prediction for
// every sample into target[] at its original index.
void svm_cross_validation(const svm_problem *prob, const svm_parameter *param, int nr_fold, double *target)
{
	int i;
	int *fold_start = Malloc(int,nr_fold+1);
	int l = prob->l;
	int *perm = Malloc(int,l);
	int nr_class;

	if(param->svm_type == C_SVC ||
	   param->svm_type == NU_SVC)
	{
		int *start = NULL;
		int *label = NULL;
		int *count = NULL;
		svm_group_classes(prob,&nr_class,&label,&start,&count,perm);

		// random shuffle and then data grouped by fold using the array perm
		int *fold_count = Malloc(int,nr_fold);
		int c;
		int *index = Malloc(int,l);
		for(i=0;i<l;i++)
			index[i]=perm[i];
		// Fisher-Yates shuffle within each class block.
		for (c=0; c<nr_class; c++) 
			for(i=0;i<count[c];i++)
			{
				int j = i+rand()%(count[c]-i);
				swap(index[start[c]+j],index[start[c]+i]);
			}
		// Each fold receives (as near as possible) count[c]/nr_fold of class c.
		for(i=0;i<nr_fold;i++)
		{
			fold_count[i] = 0;
			for (c=0; c<nr_class;c++)
				fold_count[i]+=(i+1)*count[c]/nr_fold-i*count[c]/nr_fold;
		}
		fold_start[0]=0;
		for (i=1;i<=nr_fold;i++)
			fold_start[i] = fold_start[i-1]+fold_count[i-1];
		// Deal samples into folds; fold_start is consumed as a cursor here...
		for (c=0; c<nr_class;c++)
			for(i=0;i<nr_fold;i++)
			{
				int begin = start[c]+i*count[c]/nr_fold;
				int end = start[c]+(i+1)*count[c]/nr_fold;
				for(int j=begin;j<end;j++)
				{
					perm[fold_start[i]] = index[j];
					fold_start[i]++;
				}
			}
		// ...so it is rebuilt afterwards.
		fold_start[0]=0;
		for (i=1;i<=nr_fold;i++)
			fold_start[i] = fold_start[i-1]+fold_count[i-1];
		free(start);
		free(label);
		free(count);
		free(index);
		free(fold_count);
	}
	else
	{
		// Regression / one-class: simple random permutation, equal-size folds.
		for(i=0;i<l;i++) perm[i]=i;
		for(i=0;i<l;i++)
		{
			int j = i+rand()%(l-i);
			swap(perm[i],perm[j]);
		}
		for(i=0;i<=nr_fold;i++)
			fold_start[i]=i*l/nr_fold;
	}

	// Train on everything outside fold i; predict samples inside fold i.
	for(i=0;i<nr_fold;i++)
	{
		int begin = fold_start[i];
		int end = fold_start[i+1];
		int j,k;
		struct svm_problem subprob;

		subprob.l = l-(end-begin);
		subprob.x = Malloc(struct svm_node*,subprob.l);
		subprob.y = Malloc(double,subprob.l);

		k=0;
		for(j=0;j<begin;j++)
		{
			subprob.x[k] = prob->x[perm[j]];
			subprob.y[k] = prob->y[perm[j]];
			++k;
		}
		for(j=end;j<l;j++)
		{
			subprob.x[k] = prob->x[perm[j]];
			subprob.y[k] = prob->y[perm[j]];
			++k;
		}
		struct svm_model *submodel = svm_train(&subprob,param);
		if(param->probability && 
		   (param->svm_type == C_SVC || param->svm_type == NU_SVC))
		{
			double *prob_estimates=Malloc(double,svm_get_nr_class(submodel));
			for(j=begin;j<end;j++)
				target[perm[j]] = svm_predict_probability(submodel,prob->x[perm[j]],prob_estimates);
			free(prob_estimates);
		}
		else
			for(j=begin;j<end;j++)
				target[perm[j]] = svm_predict(submodel,prob->x[perm[j]]);
		svm_destroy_model(submodel);
		free(subprob.x);
		free(subprob.y);
	}
	free(fold_start);
	free(perm);
}

// Accessor: the model's svm_type (C_SVC, NU_SVC, ONE_CLASS, EPSILON_SVR, NU_SVR).
int svm_get_svm_type(const svm_model *model)
{
	return model->param.svm_type;
}

// Accessor: number of classes (2 for regression/one-class models).
int svm_get_nr_class(const svm_model *model)
{
	return model->nr_class;
}

// Copy the model's class labels into caller-provided label[] (must hold
// nr_class ints). No-op for models without labels (regression/one-class).
void svm_get_labels(const svm_model *model, int* label)
{
	if (model->label != NULL)
		for(int i=0;i<model->nr_class;i++)
			label[i] = model->label[i];
}

// Laplace parameter for SVR probability inference, or 0 (with a message)
// if the model was not trained with -b 1 or is not a regression model.
double svm_get_svr_probability(const svm_model *model)
{
	if ((model->param.svm_type == EPSILON_SVR || model->param.svm_type == NU_SVR) &&
	    model->probA!=NULL)
		return model->probA[0];
	else
	{
		info("Model doesn't contain information for SVR probability inference\n");
		return 0;
	}
}

// Raw decision values for x: one value for regression/one-class models,
// nr_class*(nr_class-1)/2 pairwise values for classification models.
// dec_values must be caller-allocated with the appropriate size.
void svm_predict_values(const svm_model *model, const svm_node *x, double* dec_values)
{
	if(model->param.svm_type == ONE_CLASS ||
	   model->param.svm_type == EPSILON_SVR ||
	   model->param.svm_type == NU_SVR)
	{
		double *sv_coef = model->sv_coef[0];
		double sum = 0;
		for(int i=0;i<model->l;i++)
			sum += sv_coef[i] * Kernel::k_function(x,model->SV[i],model->param);
		sum -= model->rho[0];
		*dec_values = sum;
	}
	else
	{
		int i;
		int nr_class = model->nr_class;
		int l = model->l;

		// Evaluate the kernel against every SV once; reused by all pairs.
		double *kvalue = Malloc(double,l);
		for(i=0;i<l;i++)
			kvalue[i] = Kernel::k_function(x,model->SV[i],model->param);

		// start[c] = offset of class c's SVs in the flattened SV array.
		int *start = Malloc(int,nr_class);
		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+model->nSV[i-1];

		int p=0;
		int pos=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				double sum = 0;
				int si = start[i];
				int sj = start[j];
				int ci = model->nSV[i];
				int cj = model->nSV[j];

				int k;
				// Coefficient layout mirrors svm_train:
				// class i's alphas for pair (i,j) live in sv_coef[j-1],
				// class j's in sv_coef[i].
				double *coef1 = model->sv_coef[j-1];
				double *coef2 = model->sv_coef[i];
				for(k=0;k<ci;k++)
					sum += coef1[si+k] * kvalue[si+k];
				for(k=0;k<cj;k++)
					sum += coef2[sj+k] * kvalue[sj+k];
				sum -= model->rho[p++];
				dec_values[pos++] = sum;
			}

		free(kvalue);
		free(start);
	}
}

// Predict the target for x: the regression value, +/-1 for one-class, or the
// class label chosen by one-vs-one majority voting for classification.
double svm_predict(const svm_model *model, const svm_node *x)
{
	if(model->param.svm_type == ONE_CLASS ||
	   model->param.svm_type == EPSILON_SVR ||
	   model->param.svm_type == NU_SVR)
	{
		double res;
		svm_predict_values(model, x, &res);

		if(model->param.svm_type == ONE_CLASS)
			return (res>0)?1:-1;
		else
			return res;
	}
	else
	{
		int i;
		int nr_class = model->nr_class;
		double *dec_values = Malloc(double, nr_class*(nr_class-1)/2);
		svm_predict_values(model, x, dec_values);

		// One-vs-one voting: each pairwise decision value casts one vote.
		int *vote = Malloc(int,nr_class);
		for(i=0;i<nr_class;i++)
			vote[i] = 0;
		int pos=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				if(dec_values[pos++] > 0)
					++vote[i];
				else
					++vote[j];
			}

		// Ties resolve to the lower class index (first maximum wins).
		int vote_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(vote[i] > vote[vote_max_idx])
				vote_max_idx = i;
		free(vote);
		free(dec_values);
		return model->label[vote_max_idx];
	}
}

// Predict with per-class probability estimates (written to prob_estimates,
// caller-allocated, nr_class doubles). Requires a C_SVC/NU_SVC model trained
// with probability info; otherwise falls back to plain svm_predict.
double svm_predict_probability(
	const svm_model *model, const svm_node *x, double *prob_estimates)
{
	if ((model->param.svm_type == C_SVC || model->param.svm_type == NU_SVC) &&
	    model->probA!=NULL && model->probB!=NULL)
	{
		int i;
		int nr_class = model->nr_class;
		double *dec_values = Malloc(double, nr_class*(nr_class-1)/2);
		svm_predict_values(model, x, dec_values);

		// Sigmoid-map each pairwise decision value to a probability,
		// clamped to [min_prob, 1-min_prob] to keep the coupling stable.
		double min_prob=1e-7;
		double **pairwise_prob=Malloc(double *,nr_class);
		for(i=0;i<nr_class;i++)
			pairwise_prob[i]=Malloc(double,nr_class);
		int k=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				pairwise_prob[i][j]=min(max(sigmoid_predict(dec_values[k],model->probA[k],model->probB[k]),min_prob),1-min_prob);
				pairwise_prob[j][i]=1-pairwise_prob[i][j];
				k++;
			}
		// Couple the pairwise probabilities into per-class estimates.
		multiclass_probability(nr_class,pairwise_prob,prob_estimates);

		int prob_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(prob_estimates[i] > prob_estimates[prob_max_idx])
				prob_max_idx = i;
		for(i=0;i<nr_class;i++)
			free(pairwise_prob[i]);
		free(dec_values);
		free(pairwise_prob);
		return model->label[prob_max_idx];
	}
	else 
		return svm_predict(model, x);
}

// Names written to / parsed from model files; order must match the
// corresponding enum values (indexed by param.svm_type / param.kernel_type).
const char *svm_type_table[] =
{
	"c_svc","nu_svc","one_class","epsilon_svr","nu_svr",NULL
};

const char *kernel_type_table[] =
{
// Canasai's addition begin
	"linear","polynomial","rbf","sigmoid","string",NULL
// Canasai's addition end
};

// Serialize the model to a text file. Returns 0 on success, -1 if the file
// cannot be opened.
// NOTE(review): this function continues past the end of this chunk — the
// remainder (probA/probB, nr_sv, SV section, fclose/return) is not visible here.
int svm_save_model(const char *model_file_name, const svm_model *model)
{
	FILE *fp = fopen(model_file_name,"w");
	if(fp==NULL) return -1;

	const svm_parameter& param = model->param;

	fprintf(fp,"svm_type %s\n", svm_type_table[param.svm_type]);
	fprintf(fp,"kernel_type %s\n", kernel_type_table[param.kernel_type]);

	// Only parameters meaningful for the chosen kernel are written
	// (the STRING kernel shares degree/gamma/coef0 — Canasai's modification).
	if(param.kernel_type == POLY || param.kernel_type == STRING)
		fprintf(fp,"degree %g\n", param.degree);

	if(param.kernel_type == POLY || param.kernel_type == RBF || param.kernel_type == SIGMOID || param.kernel_type == STRING)
		fprintf(fp,"gamma %g\n", param.gamma);

	if(param.kernel_type == POLY || param.kernel_type == SIGMOID || param.kernel_type == STRING)
		fprintf(fp,"coef0 %g\n", param.coef0);

	int nr_class = model->nr_class;
	int l = model->l;
	fprintf(fp, "nr_class %d\n", nr_class);
	fprintf(fp, "total_sv %d\n",l);

	{
		fprintf(fp, "rho");
		for(int i=0;i<nr_class*(nr_class-1)/2;i++)
			fprintf(fp," %g",model->rho[i]);
		fprintf(fp, "\n");
	}

	if(model->label)
	{
		fprintf(fp, "label");
		for(int i=0;i<nr_class;i++)
			fprintf(fp," %d",model->label[i]);
		fprintf(fp, "\n");
	}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -