// svm.java
double fApB = decision_value*A+B;
if (fApB >= 0)
return Math.exp(-fApB)/(1.0+Math.exp(-fApB));
else
return 1.0/(1+Math.exp(fApB));
}
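// Note: sigmoid_predict above evaluates the Platt-scaling sigmoid
// P(y=1|f) = 1/(1+exp(A*f+B)) for decision value f. The two branches are
// algebraically identical; picking the one whose exponent is non-positive
// keeps Math.exp from overflowing for large |fApB|.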
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
private static void multiclass_probability(int k, double[][] r, double[] p)
{
int t,j;
int iter = 0, max_iter=Math.max(100,k);
double[][] Q=new double[k][k];
double[] Qp= new double[k];
double pQp, eps=0.005/k;
for (t=0;t<k;t++)
{
p[t]=1.0/k; // Valid if k = 1
Q[t][t]=0;
for (j=0;j<t;j++)
{
Q[t][t]+=r[j][t]*r[j][t];
Q[t][j]=Q[j][t];
}
for (j=t+1;j<k;j++)
{
Q[t][t]+=r[j][t]*r[j][t];
Q[t][j]=-r[j][t]*r[t][j];
}
}
for (iter=0;iter<max_iter;iter++)
{
// stopping condition, recalculate QP,pQP for numerical accuracy
pQp=0;
for (t=0;t<k;t++)
{
Qp[t]=0;
for (j=0;j<k;j++)
Qp[t]+=Q[t][j]*p[j];
pQp+=p[t]*Qp[t];
}
double max_error=0;
for (t=0;t<k;t++)
{
double error=Math.abs(Qp[t]-pQp);
if (error>max_error)
max_error=error;
}
if (max_error<eps) break;
for (t=0;t<k;t++)
{
double diff=(-Qp[t]+pQp)/Q[t][t];
p[t]+=diff;
pQp=(pQp+diff*(diff*Q[t][t]+2*Qp[t]))/(1+diff)/(1+diff);
for (j=0;j<k;j++)
{
Qp[j]=(Qp[j]+diff*Q[t][j])/(1+diff);
p[j]/=(1+diff);
}
}
}
if (iter>=max_iter)
System.err.print("Exceeds max_iter in multiclass_prob\n");
}
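// Illustrative call (hypothetical caller, not part of the original source):
// given pairwise estimates r[i][j] ~ P(y=i | y=i or j, x), with
// r[i][j]+r[j][i]=1, multiclass_probability fills p with the k class
// probabilities:
//
//   double[][] r = ...;               // k x k pairwise probabilities
//   double[] p = new double[k];
//   multiclass_probability(k, r, p);  // p[t] >= 0 and the p[t] sum to 1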
// Cross-validation decision values for probability estimates
private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
{
int i;
int nr_fold = 5;
int[] perm = new int[prob.l];
double[] dec_values = new double[prob.l];
// random shuffle
for(i=0;i<prob.l;i++) perm[i]=i;
for(i=0;i<prob.l;i++)
{
int j = i+(int)(Math.random()*(prob.l-i));
int tmp=perm[i]; perm[i]=perm[j]; perm[j]=tmp;
}
for(i=0;i<nr_fold;i++)
{
int begin = i*prob.l/nr_fold;
int end = (i+1)*prob.l/nr_fold;
int j,k;
svm_problem subprob = new svm_problem();
subprob.l = prob.l-(end-begin);
subprob.x = new svm_node[subprob.l][];
subprob.y = new double[subprob.l];
k=0;
for(j=0;j<begin;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
for(j=end;j<prob.l;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
int p_count=0,n_count=0;
for(j=0;j<k;j++)
if(subprob.y[j]>0)
p_count++;
else
n_count++;
if(p_count==0 && n_count==0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = 0;
else if(p_count > 0 && n_count == 0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = 1;
else if(p_count == 0 && n_count > 0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = -1;
else
{
svm_parameter subparam = (svm_parameter)param.clone();
subparam.probability=0;
subparam.C=1.0;
subparam.nr_weight=2;
subparam.weight_label = new int[2];
subparam.weight = new double[2];
subparam.weight_label[0]=+1;
subparam.weight_label[1]=-1;
subparam.weight[0]=Cp;
subparam.weight[1]=Cn;
svm_model submodel = svm_train(subprob,subparam);
for(j=begin;j<end;j++)
{
double[] dec_value=new double[1];
svm_predict_values(submodel,prob.x[perm[j]],dec_value);
dec_values[perm[j]]=dec_value[0];
// ensure +1 -1 output order; this is why the CV subroutine is not reused here
dec_values[perm[j]] *= submodel.label[0];
}
}
}
sigmoid_train(prob.l,dec_values,prob.y,probAB);
}
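// probAB[0] and probAB[1] are the sigmoid parameters A and B fitted on
// out-of-fold decision values; they are consumed by sigmoid_predict.
// A hypothetical caller (names assumed for illustration):
//
//   double[] probAB = new double[2];
//   svm_binary_svc_probability(prob, param, Cp, Cn, probAB);
//   double p_pos = sigmoid_predict(dec_value, probAB[0], probAB[1]);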
// Return parameter of a Laplace distribution
private static double svm_svr_probability(svm_problem prob, svm_parameter param)
{
int i;
int nr_fold = 5;
double[] ymv = new double[prob.l];
double mae = 0;
svm_parameter newparam = (svm_parameter)param.clone();
newparam.probability = 0;
svm_cross_validation(prob,newparam,nr_fold,ymv);
for(i=0;i<prob.l;i++)
{
ymv[i]=prob.y[i]-ymv[i];
mae += Math.abs(ymv[i]);
}
mae /= prob.l;
double std=Math.sqrt(2*mae*mae);
int count=0;
mae=0;
for(i=0;i<prob.l;i++)
if (Math.abs(ymv[i]) > 5*std)
count=count+1;
else
mae+=Math.abs(ymv[i]);
mae /= (prob.l-count);
System.err.print("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+mae+"\n");
return mae;
}
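// The returned value is the Laplace scale sigma estimated from the
// cross-validated residuals z = y - yhat (residuals beyond 5*std are
// discarded as outliers before the final mean absolute error). Since
// z ~ Laplace(0, sigma) under this model, P(|z| <= t) = 1 - exp(-t/sigma),
// which can be used to form prediction intervals around SVR outputs.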
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
private static void svm_group_classes(svm_problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
{
int l = prob.l;
int max_nr_class = 16;
int nr_class = 0;
int[] label = new int[max_nr_class];
int[] count = new int[max_nr_class];
int[] data_label = new int[l];
int i;
for(i=0;i<l;i++)
{
int this_label = (int)(prob.y[i]);
int j;
for(j=0;j<nr_class;j++)
{
if(this_label == label[j])
{
++count[j];
break;
}
}
data_label[i] = j;
if(j == nr_class)
{
if(nr_class == max_nr_class)
{
max_nr_class *= 2;
int[] new_data = new int[max_nr_class];
System.arraycopy(label,0,new_data,0,label.length);
label = new_data;
new_data = new int[max_nr_class];
System.arraycopy(count,0,new_data,0,count.length);
count = new_data;
}
label[nr_class] = this_label;
count[nr_class] = 1;
++nr_class;
}
}
int[] start = new int[nr_class];
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+count[i-1];
for(i=0;i<l;i++)
{
perm[start[data_label[i]]] = i;
++start[data_label[i]];
}
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+count[i-1];
nr_class_ret[0] = nr_class;
label_ret[0] = label;
start_ret[0] = start;
count_ret[0] = count;
}
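// svm_group_classes returns its results through length-1 arrays, the Java
// idiom used here for C-style output parameters. A sketch of a call site
// (mirroring the one in svm_train below):
//
//   int[] tmp_nr_class = new int[1];
//   int[][] tmp_label = new int[1][], tmp_start = new int[1][], tmp_count = new int[1][];
//   int[] perm = new int[prob.l];
//   svm_group_classes(prob, tmp_nr_class, tmp_label, tmp_start, tmp_count, perm);
//   // prob.x[perm[start[c] ... start[c]+count[c]-1]] now belong to class c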
//
// Interface functions
//
public static svm_model svm_train(svm_problem prob, svm_parameter param)
{
svm_model model = new svm_model();
model.param = param;
if(param.svm_type == svm_parameter.ONE_CLASS ||
param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR)
{
// regression or one-class-svm
model.nr_class = 2;
model.label = null;
model.nSV = null;
model.probA = null; model.probB = null;
model.sv_coef = new double[1][];
if(param.probability == 1 &&
(param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR))
{
model.probA = new double[1];
model.probA[0] = svm_svr_probability(prob,param);
}
decision_function f = svm_train_one(prob,param,0,0);
model.rho = new double[1];
model.rho[0] = f.rho;
int nSV = 0;
int i;
for(i=0;i<prob.l;i++)
if(Math.abs(f.alpha[i]) > 0) ++nSV;
model.l = nSV;
model.SV = new svm_node[nSV][];
model.sv_coef[0] = new double[nSV];
int j = 0;
for(i=0;i<prob.l;i++)
if(Math.abs(f.alpha[i]) > 0)
{
model.SV[j] = prob.x[i];
model.sv_coef[0][j] = f.alpha[i];
++j;
}
}
else
{
// classification
int l = prob.l;
int[] tmp_nr_class = new int[1];
int[][] tmp_label = new int[1][];
int[][] tmp_start = new int[1][];
int[][] tmp_count = new int[1][];
int[] perm = new int[l];
// group training data of the same class
svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
int nr_class = tmp_nr_class[0];
int[] label = tmp_label[0];
int[] start = tmp_start[0];
int[] count = tmp_count[0];
svm_node[][] x = new svm_node[l][];
int i;
for(i=0;i<l;i++)
x[i] = prob.x[perm[i]];
// calculate weighted C
double[] weighted_C = new double[nr_class];
for(i=0;i<nr_class;i++)
weighted_C[i] = param.C;
for(i=0;i<param.nr_weight;i++)
{
int j;
for(j=0;j<nr_class;j++)
if(param.weight_label[i] == label[j])
break;
if(j == nr_class)
System.err.print("warning: class label "+param.weight_label[i]+" specified in weight is not found\n");
else
weighted_C[j] *= param.weight[i];
}
// train k*(k-1)/2 models
boolean[] nonzero = new boolean[l];
for(i=0;i<l;i++)
nonzero[i] = false;
decision_function[] f = new decision_function[nr_class*(nr_class-1)/2];
double[] probA=null,probB=null;
if (param.probability == 1)
{
probA=new double[nr_class*(nr_class-1)/2];
probB=new double[nr_class*(nr_class-1)/2];
}
int p = 0;
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
svm_problem sub_prob = new svm_problem();
int si = start[i], sj = start[j];
int ci = count[i], cj = count[j];
sub_prob.l = ci+cj;
sub_prob.x = new svm_node[sub_prob.l][];
sub_prob.y = new double[sub_prob.l];
int k;
for(k=0;k<ci;k++)
{
sub_prob.x[k] = x[si+k];
sub_prob.y[k] = +1;
}
for(k=0;k<cj;k++)
{
sub_prob.x[ci+k] = x[sj+k];
sub_prob.y[ci+k] = -1;
}
if(param.probability == 1)
{
double[] probAB=new double[2];
svm_binary_svc_probability(sub_prob,param,weighted_C[i],weighted_C[j],probAB);
probA[p]=probAB[0];
probB[p]=probAB[1];
}
f[p] = svm_train_one(sub_prob,param,weighted_C[i],weighted_C[j]);
for(k=0;k<ci;k++)
if(!nonzero[si+k] && Math.abs(f[p].alpha[k]) > 0)
nonzero[si+k] = true;
for(k=0;k<cj;k++)
if(!nonzero[sj+k] && Math.abs(f[p].alpha[ci+k]) > 0)
nonzero[sj+k] = true;
++p;
}
// build output
model.nr_class = nr_class;
model.label = new int[nr_class];
for(i=0;i<nr_class;i++)
model.label[i] = label[i];
model.rho = new double[nr_class*(nr_class-1)/2];
for(i=0;i<nr_class*(nr_class-1)/2;i++)
model.rho[i] = f[i].rho;
if(param.probability == 1)
{
model.probA = new double[nr_class*(nr_class-1)/2];
model.probB = new double[nr_class*(nr_class-1)/2];
for(i=0;i<nr_class*(nr_class-1)/2;i++)
{
model.probA[i] = probA[i];
model.probB[i] = probB[i];
}
}
else
{
model.probA=null;
model.probB=null;
}
int nnz = 0;
int[] nz_count = new int[nr_class];
model.nSV = new int[nr_class];
for(i=0;i<nr_class;i++)
{
int nSV = 0;
for(int j=0;j<count[i];j++)
if(nonzero[start[i]+j])
{
++nSV;
++nnz;
}
model.nSV[i] = nSV;
nz_count[i] = nSV;
}
System.out.print("Total nSV = "+nnz+"\n");
model.l = nnz;
model.SV = new svm_node[nnz][];
p = 0;
for(i=0;i<l;i++)
if(nonzero[i]) model.SV[p++] = x[i];
int[] nz_start = new int[nr_class];
nz_start[0] = 0;
for(i=1;i<nr_class;i++)
nz_start[i] = nz_start[i-1]+nz_count[i-1];
model.sv_coef = new double[nr_class-1][];
for(i=0;i<nr_class-1;i++)
model.sv_coef[i] = new double[nnz];
p = 0;
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
// classifier (i,j): coefficients with
// i are in sv_coef[j-1][nz_start[i]...],
// j are in sv_coef[i][nz_start[j]...]
int si = start[i];
int sj = start[j];
int ci = count[i];
int cj = count[j];
int q = nz_start[i];
int k;
for(k=0;k<ci;k++)
if(nonzero[si+k])
model.sv_coef[j-1][q++] = f[p].alpha[k];
q = nz_start[j];
for(k=0;k<cj;k++)
if(nonzero[sj+k])
model.sv_coef[i][q++] = f[p].alpha[ci+k];
++p;
}
}
return model;
}
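// For k classes svm_train builds k*(k-1)/2 one-vs-one classifiers. The
// sv_coef layout (see the comment in the loop above): for classifier (i,j),
// the coefficients of class i's support vectors start at sv_coef[j-1][nz_start[i]]
// and those of class j's support vectors start at sv_coef[i][nz_start[j]].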
// Stratified cross validation
public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
{
int i;
int[] fold_start = new int[nr_fold+1];
int l = prob.l;
int[] perm = new int[l];
// stratified cv may not give leave-one-out rate
// (splitting each class into l folds may leave some folds with zero elements)
if((param.svm_type == svm_parameter.C_SVC ||
param.svm_type == svm_parameter.NU_SVC) && nr_fold < l)
{
int[] tmp_nr_class = new int[1];
int[][] tmp_label = new int[1][];
int[][] tmp_start = new int[1][];
int[][] tmp_count = new int[1][];
svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
int nr_class = tmp_nr_class[0];
int[] label = tmp_label[0];
int[] start = tmp_start[0];
int[] count = tmp_count[0];
// random shuffle and then data grouped by fold using the array perm
int[] fold_count = new int[nr_fold];
int c;
int[] index = new int[l];
for(i=0;i<l;i++)
index[i]=perm[i];
for (c=0; c<nr_class; c++)
for(i=0;i<count[c];i++)
{
int j = i+(int)(Math.random()*(count[c]-i));
int tmp=index[start[c]+j]; index[start[c]+j]=index[start[c]+i]; index[start[c]+i]=tmp;
}
for(i=0;i<nr_fold;i++)
{
fold_count[i] = 0;
for (c=0; c<nr_class;c++)
fold_count[i]+=(i+1)*count[c]/nr_fold-i*count[c]/nr_fold;
}
fold_start[0]=0;
for (i=1;i<=nr_fold;i++)
fold_start[i] = fold_start[i-1]+fold_count[i-1];
for (c=0; c<nr_class;c++)
for(i=0;i<nr_fold;i++)
{
int begin = start[c]+i*count[c]/nr_fold;
int end = start[c]+(i+1)*count[c]/nr_fold;
for(int j=begin;j<end;j++)
{
perm[fold_start[i]] = index[j];
fold_start[i]++;
}
}
fold_start[0]=0;
for (i=1;i<=nr_fold;i++)
fold_start[i] = fold_start[i-1]+fold_count[i-1];
}
else
{
for(i=0;i<l;i++) perm[i]=i;
for(i=0;i<l;i++)
{
int j = i+(int)(Math.random()*(l-i));
int tmp=perm[i]; perm[i]=perm[j]; perm[j]=tmp;
}
for(i=0;i<=nr_fold;i++)
fold_start[i]=i*l/nr_fold;
}
for(i=0;i<nr_fold;i++)
{
int begin = fold_start[i];
int end = fold_start[i+1];
int j,k;
svm_problem subprob = new svm_problem();
subprob.l = l-(end-begin);
subprob.x = new svm_node[subprob.l][];
subprob.y = new double[subprob.l];
k=0;
for(j=0;j<begin;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
for(j=end;j<l;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
svm_model submodel = svm_train(subprob,param);
if(param.probability==1 &&
(param.svm_type == svm_parameter.C_SVC ||
param.svm_type == svm_parameter.NU_SVC))
{
double[] prob_estimates = new double[svm_get_nr_class(submodel)];
for(j=begin;j<end;j++)
target[perm[j]] = svm_predict_probability(submodel,prob.x[perm[j]],prob_estimates);
}
else
for(j=begin;j<end;j++)
target[perm[j]] = svm_predict(submodel,prob.x[perm[j]]);
}
}
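// Hypothetical use of svm_cross_validation: target[i] receives the
// cross-validated prediction for prob.x[i], so a CV accuracy (classification)
// or mean squared error (regression) can be computed directly:
//
//   double[] target = new double[prob.l];
//   svm_cross_validation(prob, param, 5, target);
//   int correct = 0;
//   for(int i = 0; i < prob.l; i++)
//       if(target[i] == prob.y[i]) ++correct;
//   System.out.println("CV accuracy = " + 100.0*correct/prob.l + "%");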