📄 solver.cs
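// Backtracking line search in the final part of sigmoid_train (Platt's sigmoid fit):
// starting from a full Newton step, the step size is halved until the new negative
// log-likelihood satisfies the sufficient-decrease (Armijo) condition with constant 1e-4.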
stepsize = 1; // Line Search
while (stepsize >= min_step)
{
newA = A + stepsize * dA;
newB = B + stepsize * dB;
// New function value
newf = 0.0;
for (i = 0; i < l; i++)
{
fApB = dec_values[i] * newA + newB;
if (fApB >= 0)
newf += t[i] * fApB + Math.Log(1 + Math.Exp(-fApB));
else
newf += (t[i] - 1) * fApB + Math.Log(1 + Math.Exp(fApB));
}
// Check sufficient decrease
if (newf < fval + 0.0001 * stepsize * gd)
{
A = newA; B = newB; fval = newf;
break;
}
else
stepsize = stepsize / 2.0;
}
if (stepsize < min_step)
{
Debug.Write("Line search fails in two-class probability estimates\n");
break;
}
}
if (iter >= max_iter)
Debug.Write("Reaching maximal iterations in two-class probability estimates\n");
probAB[0] = A; probAB[1] = B;
}
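// Maps a raw decision value to P(y = +1 | x) = 1 / (1 + exp(A*f + B)) using the A, B
// fitted above; the two branches are algebraically identical but keep exp() from overflowing.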
private static double sigmoid_predict(double decision_value, double A, double B)
{
double fApB = decision_value * A + B;
if (fApB >= 0)
return Math.Exp(-fApB) / (1.0 + Math.Exp(-fApB));
else
return 1.0 / (1 + Math.Exp(fApB));
}
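// Illustrative example (hypothetical parameter values): with A = -2.0 and B = 0.0, a
// decision value of 1.5 gives fApB = -3.0, so the result is 1 / (1 + e^-3) ~= 0.95,
// i.e. about a 95% probability for the positive class.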
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
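// Given pairwise estimates r[i,j] ~ P(y = i | y = i or j, x), it couples them into k class
// probabilities p by minimizing the quadratic form p'Qp subject to sum(p) = 1.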
private static void multiclass_probability(int k, double[,] r, double[] p)
{
int t,j;
int iter = 0, max_iter=Math.Max(100,k);
double[,] Q=new double[k,k];
double[] Qp= new double[k];
double pQp, eps=0.005/k;
for (t=0;t<k;t++)
{
p[t]=1.0/k; // Valid if k = 1
Q[t,t]=0;
for (j=0;j<t;j++)
{
Q[t,t]+=r[j,t]*r[j,t];
Q[t,j]=Q[j,t];
}
for (j=t+1;j<k;j++)
{
Q[t,t]+=r[j,t]*r[j,t];
Q[t,j]=-r[j,t]*r[t,j];
}
}
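// The loops above build Q: Q[t,t] = sum over j != t of r[j,t]^2, Q[t,j] = -r[j,t]*r[t,j],
// and initialize p to the uniform distribution.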
for (iter=0;iter<max_iter;iter++)
{
// stopping condition, recalculate QP,pQP for numerical accuracy
pQp=0;
for (t=0;t<k;t++)
{
Qp[t]=0;
for (j=0;j<k;j++)
Qp[t]+=Q[t,j]*p[j];
pQp+=p[t]*Qp[t];
}
double max_error=0;
for (t=0;t<k;t++)
{
double error=Math.Abs(Qp[t]-pQp);
if (error>max_error)
max_error=error;
}
if (max_error<eps) break;
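// Coordinate update: shift p[t] so that (Qp)[t] matches p'Qp, then rescale p, Qp and
// p'Qp by 1/(1+diff) so that p stays a probability distribution.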
for (t=0;t<k;t++)
{
double diff=(-Qp[t]+pQp)/Q[t,t];
p[t]+=diff;
pQp=(pQp+diff*(diff*Q[t,t]+2*Qp[t]))/(1+diff)/(1+diff);
for (j=0;j<k;j++)
{
Qp[j]=(Qp[j]+diff*Q[t,j])/(1+diff);
p[j]/=(1+diff);
}
}
}
if (iter>=max_iter)
Debug.Write("Exceeds max_iter in multiclass_prob\n");
}
// Cross-validation decision values for probability estimates
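// Fits the sigmoid parameters (A, B) for one binary problem: an internal 5-fold
// cross-validation (with C = 1 and class weights Cp, Cn) produces out-of-fold decision
// values, which are then passed to sigmoid_train together with the true labels.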
private static void svm_binary_svc_probability(Problem prob, Parameter param, double Cp, double Cn, double[] probAB)
{
Random rand = new Random();
int i;
int nr_fold = 5;
int[] perm = new int[prob.Count];
double[] dec_values = new double[prob.Count];
// random shuffle
for (i = 0; i < prob.Count; i++) perm[i] = i;
for (i = 0; i < prob.Count; i++)
{
int j = i + (int)(rand.NextDouble() * (prob.Count - i));
int tmp = perm[i]; perm[i] = perm[j]; perm[j] = tmp;
}
for (i = 0; i < nr_fold; i++)
{
int begin = i * prob.Count / nr_fold;
int end = (i + 1) * prob.Count / nr_fold;
int j, k;
Problem subprob = new Problem();
subprob.Count = prob.Count - (end - begin);
subprob.X = new Node[subprob.Count][];
subprob.Y = new double[subprob.Count];
k = 0;
for (j = 0; j < begin; j++)
{
subprob.X[k] = prob.X[perm[j]];
subprob.Y[k] = prob.Y[perm[j]];
++k;
}
for (j = end; j < prob.Count; j++)
{
subprob.X[k] = prob.X[perm[j]];
subprob.Y[k] = prob.Y[perm[j]];
++k;
}
int p_count = 0, n_count = 0;
for (j = 0; j < k; j++)
if (subprob.Y[j] > 0)
p_count++;
else
n_count++;
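// Degenerate fold: if the training part contains only one class (or is empty), the
// held-out decision values are set to a constant instead of training a sub-model.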
if (p_count == 0 && n_count == 0)
for (j = begin; j < end; j++)
dec_values[perm[j]] = 0;
else if (p_count > 0 && n_count == 0)
for (j = begin; j < end; j++)
dec_values[perm[j]] = 1;
else if (p_count == 0 && n_count > 0)
for (j = begin; j < end; j++)
dec_values[perm[j]] = -1;
else
{
Parameter subparam = (Parameter)param.Clone();
subparam.Probability = false;
subparam.C = 1.0;
subparam.WeightCount = 2;
subparam.WeightLabels = new int[2];
subparam.Weights = new double[2];
subparam.WeightLabels[0] = +1;
subparam.WeightLabels[1] = -1;
subparam.Weights[0] = Cp;
subparam.Weights[1] = Cn;
Model submodel = svm_train(subprob, subparam);
for (j = begin; j < end; j++)
{
double[] dec_value = new double[1];
svm_predict_values(submodel, prob.X[perm[j]], dec_value);
dec_values[perm[j]] = dec_value[0];
// ensure the +1/-1 label order; this is why the generic CV subroutine is not used here
dec_values[perm[j]] *= submodel.ClassLabels[0];
}
}
}
sigmoid_train(prob.Count, dec_values, prob.Y, probAB);
}
// Return parameter of a Laplace distribution
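// The scale is estimated from 5-fold cross-validation residuals: compute their mean
// absolute error, and recompute it after discarding residuals larger than 5 standard
// deviations (with std derived from the first estimate) to reduce the effect of outliers.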
private static double svm_svr_probability(Problem prob, Parameter param)
{
int i;
int nr_fold = 5;
double[] ymv = new double[prob.Count];
double mae = 0;
Parameter newparam = (Parameter)param.Clone();
newparam.Probability = false;
svm_cross_validation(prob, newparam, nr_fold, ymv);
for (i = 0; i < prob.Count; i++)
{
ymv[i] = prob.Y[i] - ymv[i];
mae += Math.Abs(ymv[i]);
}
mae /= prob.Count;
double std = Math.Sqrt(2 * mae * mae);
int count = 0;
mae = 0;
for (i = 0; i < prob.Count; i++)
if (Math.Abs(ymv[i]) > 5 * std)
count = count + 1;
else
mae += Math.Abs(ymv[i]);
mae /= (prob.Count - count);
Debug.Write("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + mae + "\n");
return mae;
}
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
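// The routine counts the distinct labels and the number of examples per label (growing
// the label/count arrays on demand), then fills perm so that indices belonging to the
// same class are stored contiguously, with start[i] giving the offset of class i.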
private static void svm_group_classes(Problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
{
int l = prob.Count;
int max_nr_class = 16;
int nr_class = 0;
int[] label = new int[max_nr_class];
int[] count = new int[max_nr_class];
int[] data_label = new int[l];
int i;
for (i = 0; i < l; i++)
{
int this_label = (int)(prob.Y[i]);
int j;
for (j = 0; j < nr_class; j++)
{
if (this_label == label[j])
{
++count[j];
break;
}
}
data_label[i] = j;
if (j == nr_class)
{
if (nr_class == max_nr_class)
{
max_nr_class *= 2;
int[] new_data = new int[max_nr_class];
Array.Copy(label, 0, new_data, 0, label.Length);
label = new_data;
new_data = new int[max_nr_class];
Array.Copy(count, 0, new_data, 0, count.Length);
count = new_data;
}
label[nr_class] = this_label;
count[nr_class] = 1;
++nr_class;
}
}
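// start[] is first used as a running write cursor while scattering indices into perm,
// then recomputed so it again points at the beginning of each class block.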
int[] start = new int[nr_class];
start[0] = 0;
for (i = 1; i < nr_class; i++)
start[i] = start[i - 1] + count[i - 1];
for (i = 0; i < l; i++)
{
perm[start[data_label[i]]] = i;
++start[data_label[i]];
}
start[0] = 0;
for (i = 1; i < nr_class; i++)
start[i] = start[i - 1] + count[i - 1];
nr_class_ret[0] = nr_class;
label_ret[0] = label;
start_ret[0] = start;
count_ret[0] = count;
}
//
// Interface functions
//
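// svm_train: for one-class SVM and regression a single decision function is trained;
// for classification the data are grouped by class and one binary sub-model is trained
// for each pair of classes (one-vs-one).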
public static Model svm_train(Problem prob, Parameter param)
{
Model model = new Model();
model.Parameter = param;
if (param.SvmType == SvmType.ONE_CLASS ||
param.SvmType == SvmType.EPSILON_SVR ||
param.SvmType == SvmType.NU_SVR)
{
// regression or one-class-svm
model.NumberOfClasses = 2;
model.ClassLabels = null;
model.NumberOfSVPerClass = null;
model.PairwiseProbabilityA = null; model.PairwiseProbabilityB = null;
model.SupportVectorCoefficients = new double[1][];
if (param.Probability &&
(param.SvmType == SvmType.EPSILON_SVR ||
param.SvmType == SvmType.NU_SVR))
{
model.PairwiseProbabilityA = new double[1];
model.PairwiseProbabilityA[0] = svm_svr_probability(prob, param);
}
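// Train the single decision function and keep only the points with nonzero alpha
// as support vectors.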
decision_function f = svm_train_one(prob, param, 0, 0);
model.Rho = new double[1];
model.Rho[0] = f.rho;
int nSV = 0;
int i;
for (i = 0; i < prob.Count; i++)
if (Math.Abs(f.alpha[i]) > 0) ++nSV;
model.SupportVectorCount = nSV;
model.SupportVectors = new Node[nSV][];
model.SupportVectorCoefficients[0] = new double[nSV];
int j = 0;
for (i = 0; i < prob.Count; i++)
if (Math.Abs(f.alpha[i]) > 0)
{
model.SupportVectors[j] = prob.X[i];
model.SupportVectorCoefficients[0][j] = f.alpha[i];
++j;
}
}
else
{
// classification
int l = prob.Count;
int[] tmp_nr_class = new int[1];
int[][] tmp_label = new int[1][];
int[][] tmp_start = new int[1][];
int[][] tmp_count = new int[1][];
int[] perm = new int[l];
// group training data of the same class
svm_group_classes(prob, tmp_nr_class, tmp_label, tmp_start, tmp_count, perm);
int nr_class = tmp_nr_class[0];
int[] label = tmp_label[0];
int[] start = tmp_start[0];
int[] count = tmp_count[0];
Node[][] x = new Node[l][];
int i;
for (i = 0; i < l; i++)
x[i] = prob.X[perm[i]];
// calculate weighted C
double[] weighted_C = new double[nr_class];
for (i = 0; i < nr_class; i++)
weighted_C[i] = param.C;
for (i = 0; i < param.WeightCount; i++)
{
int j;
for (j = 0; j < nr_class; j++)
if (param.WeightLabels[i] == label[j])
break;
if (j == nr_class)
Debug.Write("warning: class label " + param.WeightLabels[i] + " specified in weight is not found\n");
else
weighted_C[j] *= param.Weights[i];
}
// train k*(k-1)/2 models
bool[] nonzero = new bool[l];
for (i = 0; i < l; i++)
nonzero[i] = false;
decision_function[] f = new decision_function[nr_class * (nr_class - 1) / 2];
double[] probA = null, probB = null;
if (param.Probability)
{
probA = new double[nr_class * (nr_class - 1) / 2];
probB = new double[nr_class * (nr_class - 1) / 2];
}