
svm.cs

A support vector machine program; quite useful. Everyone is welcome to use it for experiments and to improve it. I hope it helps with your work.
Language: C#
Page 1 of 5
					pQp = (pQp + diff * (diff * Q[t][t] + 2 * Qp[t])) / (1 + diff) / (1 + diff);
					for (j = 0; j < k; j++)
					{
						Qp[j] = (Qp[j] + diff * Q[t][j]) / (1 + diff);
						p[j] /= (1 + diff);
					}
				}
			}
			if (iter >= max_iter)
				System.Console.Error.Write("Exceeds max_iter in multiclass_prob\n");
		}
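		// Note (added explanation, not part of the original source): the update above is the
		// pairwise-coupling iteration of LIBSVM's multiclass_prob. Given pairwise probability
		// estimates r_ij, it finds p_1..p_k minimizing sum_i sum_{j != i} (r_ji*p_i - r_ij*p_j)^2
		// subject to p_i >= 0 and sum_i p_i = 1, updating one p_t at a time until
		// max_t |(Qp)_t - p'Qp| < eps (Wu, Lin and Weng, JMLR 2005).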
		
		// Cross-validation decision values for probability estimates
		private static void  svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
		{
			int i;
			int nr_fold = 5;
			int[] perm = new int[prob.l];
			double[] dec_values = new double[prob.l];
			
			// random shuffle
			for (i = 0; i < prob.l; i++)
				perm[i] = i;
			for (i = 0; i < prob.l; i++)
			{
				//UPGRADE_WARNING: In C#, narrowing conversions may produce unexpected results. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042"'
				int j = i + (int) (SupportClass.Random.NextDouble() * (prob.l - i));
				int tmp = perm[i]; perm[i] = perm[j]; perm[j] = tmp;
			}
			for (i = 0; i < nr_fold; i++)
			{
				int begin = i * prob.l / nr_fold;
				int end = (i + 1) * prob.l / nr_fold;
				int j, k;
				svm_problem subprob = new svm_problem();
				
				subprob.l = prob.l - (end - begin);
				subprob.x = new svm_node[subprob.l][];
				subprob.y = new double[subprob.l];
				
				k = 0;
				for (j = 0; j < begin; j++)
				{
					subprob.x[k] = prob.x[perm[j]];
					subprob.y[k] = prob.y[perm[j]];
					++k;
				}
				for (j = end; j < prob.l; j++)
				{
					subprob.x[k] = prob.x[perm[j]];
					subprob.y[k] = prob.y[perm[j]];
					++k;
				}
				int p_count = 0, n_count = 0;
				for (j = 0; j < k; j++)
					if (subprob.y[j] > 0)
						p_count++;
					else
						n_count++;
				
				if (p_count == 0 && n_count == 0)
					for (j = begin; j < end; j++)
						dec_values[perm[j]] = 0;
				else if (p_count > 0 && n_count == 0)
					for (j = begin; j < end; j++)
						dec_values[perm[j]] = 1;
				else if (p_count == 0 && n_count > 0)
					for (j = begin; j < end; j++)
						dec_values[perm[j]] = - 1;
				else
				{
					svm_parameter subparam = (svm_parameter) param.Clone();
					subparam.probability = 0;
					subparam.C = 1.0;
					subparam.nr_weight = 2;
					subparam.weight_label = new int[2];
					subparam.weight = new double[2];
					subparam.weight_label[0] = + 1;
					subparam.weight_label[1] = - 1;
					subparam.weight[0] = Cp;
					subparam.weight[1] = Cn;
					svm_model submodel = svm_train(subprob, subparam);
					for (j = begin; j < end; j++)
					{
						double[] dec_value = new double[1];
						svm_predict_values(submodel, prob.x[perm[j]], dec_value);
						dec_values[perm[j]] = dec_value[0];
						// ensure +1/-1 order; this is the reason the generic CV subroutine is not reused here
						dec_values[perm[j]] *= submodel.label[0];
					}
				}
			}
			sigmoid_train(prob.l, dec_values, prob.y, probAB);
		}
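		// Note (added explanation, not part of the original source): sigmoid_train fits Platt's
		// sigmoid P(y = +1 | f) = 1 / (1 + exp(A*f + B)) to the out-of-fold decision values f
		// collected above and stores the fitted (A, B) in probAB; using cross-validated rather
		// than training decision values reduces the bias of the probability estimates.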
		
		// Return parameter of a Laplace distribution 
		private static double svm_svr_probability(svm_problem prob, svm_parameter param)
		{
			int i;
			int nr_fold = 5;
			double[] ymv = new double[prob.l];
			double mae = 0;
			
			svm_parameter newparam = (svm_parameter) param.Clone();
			newparam.probability = 0;
			svm_cross_validation(prob, newparam, nr_fold, ymv);
			for (i = 0; i < prob.l; i++)
			{
				ymv[i] = prob.y[i] - ymv[i];
				mae += System.Math.Abs(ymv[i]);
			}
			mae /= prob.l;
			double std = System.Math.Sqrt(2 * mae * mae);
			int count = 0;
			mae = 0;
			for (i = 0; i < prob.l; i++)
				if (System.Math.Abs(ymv[i]) > 5 * std)
					count = count + 1;
				else
					mae += System.Math.Abs(ymv[i]);
			mae /= (prob.l - count);
			System.Console.Error.Write("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + mae + "\r\n");
			return mae;
		}
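		// Note (added explanation, not part of the original source): for a zero-mean Laplace density
		// (1/(2*sigma)) * exp(-|z|/sigma), the expected absolute value E|z| equals sigma, so the mean
		// absolute cross-validation residual (after discarding residuals larger than 5*std as
		// outliers) serves directly as the estimate of sigma returned above.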
		
		// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
		// perm, length l, must be allocated before calling this subroutine
		private static void  svm_group_classes(svm_problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
		{
			int l = prob.l;
			int max_nr_class = 16;
			int nr_class = 0;
			int[] label = new int[max_nr_class];
			int[] count = new int[max_nr_class];
			int[] data_label = new int[l];
			int i;
			
			for (i = 0; i < l; i++)
			{
				//UPGRADE_WARNING: In C#, narrowing conversions may produce unexpected results. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042"'
				int this_label = (int) (prob.y[i]);
				int j;
				for (j = 0; j < nr_class; j++)
				{
					if (this_label == label[j])
					{
						++count[j];
						break;
					}
				}
				data_label[i] = j;
				if (j == nr_class)
				{
					if (nr_class == max_nr_class)
					{
						max_nr_class *= 2;
						int[] new_data = new int[max_nr_class];
						Array.Copy(label, 0, new_data, 0, label.Length);
						label = new_data;
						new_data = new int[max_nr_class];
						Array.Copy(count, 0, new_data, 0, count.Length);
						count = new_data;
					}
					label[nr_class] = this_label;
					count[nr_class] = 1;
					++nr_class;
				}
			}
			
			int[] start = new int[nr_class];
			start[0] = 0;
			for (i = 1; i < nr_class; i++)
				start[i] = start[i - 1] + count[i - 1];
			for (i = 0; i < l; i++)
			{
				perm[start[data_label[i]]] = i;
				++start[data_label[i]];
			}
			start[0] = 0;
			for (i = 1; i < nr_class; i++)
				start[i] = start[i - 1] + count[i - 1];
			
			nr_class_ret[0] = nr_class;
			label_ret[0] = label;
			start_ret[0] = start;
			count_ret[0] = count;
		}
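		// Worked example (illustrative, not part of the original source): for prob.y = {2, 1, 2, 2, 1}
		// the routine returns nr_class = 2, label = {2, 1}, count = {3, 2}, start = {0, 3} and
		// perm = {0, 2, 3, 1, 4}, i.e. perm lists the indices of all class-2 samples first and then
		// the indices of all class-1 samples, each group in the original order.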
		
		//
		// Interface functions
		//
		public static svm_model svm_train(svm_problem prob, svm_parameter param)
		{
			svm_model model = new svm_model();
			model.param = param;
			
			if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR)
			{
				// regression or one-class-svm
				model.nr_class = 2;
				model.label = null;
				model.nSV = null;
				model.probA = null; model.probB = null;
				model.sv_coef = new double[1][];
				
				if (param.probability == 1 && (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR))
				{
					model.probA = new double[1];
					model.probA[0] = svm_svr_probability(prob, param);
				}
				
				decision_function f = svm_train_one(prob, param, 0, 0);
				model.rho = new double[1];
				model.rho[0] = f.rho;
				
				int nSV = 0;
				int i;
				for (i = 0; i < prob.l; i++)
					if (System.Math.Abs(f.alpha[i]) > 0)
						++nSV;
				model.l = nSV;
				model.SV = new svm_node[nSV][];
				model.sv_coef[0] = new double[nSV];
				int j = 0;
				for (i = 0; i < prob.l; i++)
					if (System.Math.Abs(f.alpha[i]) > 0)
					{
						model.SV[j] = prob.x[i];
						model.sv_coef[0][j] = f.alpha[i];
						++j;
					}
			}
			else
			{
				// classification
				int l = prob.l;
				int[] tmp_nr_class = new int[1];
				int[][] tmp_label = new int[1][];
				int[][] tmp_start = new int[1][];
				int[][] tmp_count = new int[1][];
				int[] perm = new int[l];
				
				// group training data of the same class
				svm_group_classes(prob, tmp_nr_class, tmp_label, tmp_start, tmp_count, perm);
				int nr_class = tmp_nr_class[0];
				int[] label = tmp_label[0];
				int[] start = tmp_start[0];
				int[] count = tmp_count[0];
				svm_node[][] x = new svm_node[l][];
				int i;
				for (i = 0; i < l; i++)
					x[i] = prob.x[perm[i]];
				
				// calculate weighted C
				
				double[] weighted_C = new double[nr_class];
				for (i = 0; i < nr_class; i++)
					weighted_C[i] = param.C;
				for (i = 0; i < param.nr_weight; i++)
				{
					int j;
					for (j = 0; j < nr_class; j++)
						if (param.weight_label[i] == label[j])
							break;
					if (j == nr_class)
						System.Console.Error.Write("warning: class label " + param.weight_label[i] + " specified in weight is not found\n");
					else
						weighted_C[j] *= param.weight[i];
				}
				
				// train k*(k-1)/2 models
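				// Note (added explanation, not part of the original source): the pair index p below runs
				// over the class pairs in the order (0,1), (0,2), ..., (0,k-1), (1,2), ..., (k-2,k-1);
				// rho, probA/probB and the per-pair decision values are consumed in this same order at
				// prediction time.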
				
				bool[] nonzero = new bool[l];
				for (i = 0; i < l; i++)
					nonzero[i] = false;
				decision_function[] f = new decision_function[nr_class * (nr_class - 1) / 2];
				
				double[] probA = null, probB = null;
				if (param.probability == 1)
				{
					probA = new double[nr_class * (nr_class - 1) / 2];
					probB = new double[nr_class * (nr_class - 1) / 2];
				}
				
				int p = 0;
				for (i = 0; i < nr_class; i++)
					for (int j = i + 1; j < nr_class; j++)
					{
						svm_problem sub_prob = new svm_problem();
						int si = start[i], sj = start[j];
						int ci = count[i], cj = count[j];
						sub_prob.l = ci + cj;
						sub_prob.x = new svm_node[sub_prob.l][];
						sub_prob.y = new double[sub_prob.l];
						int k;
						for (k = 0; k < ci; k++)
						{
							sub_prob.x[k] = x[si + k];
							sub_prob.y[k] = + 1;
						}
						for (k = 0; k < cj; k++)
						{
							sub_prob.x[ci + k] = x[sj + k];
							sub_prob.y[ci + k] = - 1;
						}
						
						if (param.probability == 1)
						{
							double[] probAB = new double[2];
							svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
							probA[p] = probAB[0];
							probB[p] = probAB[1];
						}
						
						f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
						for (k = 0; k < ci; k++)
							if (!nonzero[si + k] && System.Math.Abs(f[p].alpha[k]) > 0)
								nonzero[si + k] = true;
						for (k = 0; k < cj; k++)
							if (!nonzero[sj + k] && System.Math.Abs(f[p].alpha[ci + k]) > 0)
								nonzero[sj + k] = true;
						++p;
					}
				
				// build output
				
				model.nr_class = nr_class;
				
				model.label = new int[nr_class];
				for (i = 0; i < nr_class; i++)
					model.label[i] = label[i];
				
				model.rho = new double[nr_class * (nr_class - 1) / 2];
				for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
					model.rho[i] = f[i].rho;
				
				if (param.probability == 1)
				{
					model.probA = new double[nr_class * (nr_class - 1) / 2];
					model.probB = new double[nr_class * (nr_class - 1) / 2];
					for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
					{
						model.probA[i] = probA[i];
						model.probB[i] = probB[i];
					}
				}
				else
				{
					model.probA = null;
					model.probB = null;
				}
				
				int nnz = 0;
				int[] nz_count = new int[nr_class];
				model.nSV = new int[nr_class];
				for (i = 0; i < nr_class; i++)
				{
					int nSV = 0;
					for (int j = 0; j < count[i]; j++)
						if (nonzero[start[i] + j])
						{
							++nSV;
							++nnz;
						}
					model.nSV[i] = nSV;
					nz_count[i] = nSV;
				}
				
				System.Console.Out.Write("Total nSV = " + nnz + "\r\n");
				
				model.l = nnz;
				model.SV = new svm_node[nnz][];
				p = 0;
				for (i = 0; i < l; i++)
					if (nonzero[i])
						model.SV[p++] = x[i];
				
				int[] nz_start = new int[nr_class];
				nz_start[0] = 0;
				for (i = 1; i < nr_class; i++)
					nz_start[i] = nz_start[i - 1] + nz_count[i - 1];
				
				model.sv_coef = new double[nr_class - 1][];
				for (i = 0; i < nr_class - 1; i++)
					model.sv_coef[i] = new double[nnz];
				
				p = 0;
				for (i = 0; i < nr_class; i++)
					for (int j = i + 1; j < nr_class; j++)
					{
						// classifier (i,j): coefficients with
						// i are in sv_coef[j-1][nz_start[i]...],
						// j are in sv_coef[i][nz_start[j]...]
						
						int si = start[i];
						int sj = start[j];
						int ci = count[i];
						int cj = count[j];
						
						int q = nz_start[i];
						int k;
						for (k = 0; k < ci; k++)
							if (nonzero[si + k])
								model.sv_coef[j - 1][q++] = f[p].alpha[k];
						q = nz_start[j];
						for (k = 0; k < cj; k++)
							if (nonzero[sj + k])
								model.sv_coef[i][q++] = f[p].alpha[ci + k];
						++p;
					}
			}
			return model;
		}
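		// Usage sketch (illustrative, not part of the original port; the field and constant names
		// follow the standard LIBSVM API, but this machine-converted port may spell some of them
		// differently, so treat them as assumptions):
		//
		//   svm_parameter param = new svm_parameter();
		//   param.svm_type = svm_parameter.C_SVC;
		//   param.kernel_type = svm_parameter.RBF;
		//   param.C = 1; param.gamma = 0.5;
		//   param.eps = 1e-3; param.cache_size = 100;
		//   param.nr_weight = 0; param.probability = 0; param.shrinking = 1;
		//
		//   svm_problem prob = new svm_problem();
		//   prob.l = ...;        // number of training points
		//   prob.y = ...;        // labels, length prob.l
		//   prob.x = ...;        // prob.x[i] is an svm_node[] of (index, value) pairs
		//
		//   svm_model model = svm_train(prob, param);
		//   double prediction = svm_predict(model, prob.x[0]);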
		
		// Stratified cross validation
		public static void  svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
		{
			int i;
			int[] fold_start = new int[nr_fold + 1];
			int l = prob.l;
			int[] perm = new int[l];
			
			// Stratified CV may not give the leave-one-out rate: splitting each class into l folds
			// would leave some folds with zero elements of that class, hence the nr_fold < l check below.
			if ((param.svm_type == svm_parameter.C_SVC || param.svm_type == svm_parameter.NU_SVC) && nr_fold < l)
			{
				int[] tmp_nr_class = new int[1];
				int[][] tmp_label = new int[1][];
				int[][] tmp_start = new int[1][];
				int[][] tmp_count = new int[1][];
				
				svm_group_classes(prob, tmp_nr_class, tmp_label, tmp_start, tmp_count, perm);
				
				int nr_class = tmp_nr_class[0];
				int[] label = tmp_label[0];
				int[] start = tmp_start[0];
				int[] count = tmp_count[0];
				
				// random shuffle and then data grouped by fold using the array perm
				int[] fold_count = new int[nr_fold];
				int c;
				int[] index = new int[l];
				for (i = 0; i < l; i++)
					index[i] = perm[i];
				for (c = 0; c < nr_class; c++)
					for (i = 0; i < count[c]; i++)
					{
						//UPGRADE_WARNING: In C#, narrowing conversions may produce unexpected results. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042"'
						int j = i + (int) (SupportClass.Random.NextDouble() * (count[c] - i));
						int tmp = index[start[c] + j]; index[start[c] + j] = index[start[c] + i]; index[start[c] + i] = tmp;
					}
				for (i = 0; i < nr_fold; i++)
				{
					fold_count[i] = 0;
					for (c = 0; c < nr_class; c++)
						fold_count[i] += (i + 1) * count[c] / nr_fold - i * count[c] / nr_fold;
				}
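				// Note (added explanation, not part of the original source): with integer division,
				// (i+1)*count[c]/nr_fold - i*count[c]/nr_fold is the number of class-c samples assigned
				// to fold i; summed over all folds this is exactly count[c], so every sample lands in
				// exactly one fold and class proportions stay nearly equal across folds.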
				fold_start[0] = 0;
				for (i = 1; i <= nr_fold; i++)
					fold_start[i] = fold_start[i - 1] + fold_count[i - 1];
				for (c = 0; c < nr_class; c++)
					for (i = 0; i < nr_fold; i++)
					{
						int begin = start[c] + i * count[c] / nr_fold;
						int end = start[c] + (i + 1) * count[c] / nr_fold;
						for (int j = begin; j < end; j++)
						{
							perm[fold_start[i]] = index[j];
							fold_start[i]++;
						}
					}
				fold_start[0] = 0;
				for (i = 1; i <= nr_fold; i++)
					fold_start[i] = fold_start[i - 1] + fold_count[i - 1];
			}
			else
			{
				for (i = 0; i < l; i++)
					perm[i] = i;
				for (i = 0; i < l; i++)
				{
					//
