JMySVMLearner.java
/*
* YALE - Yet Another Learning Environment
* Copyright (C) 2001-2004
* Simon Fischer, Ralf Klinkenberg, Ingo Mierswa,
* Katharina Morik, Oliver Ritthoff
* Artificial Intelligence Unit
* Computer Science Department
* University of Dortmund
* 44221 Dortmund, Germany
* email: yale-team@lists.sourceforge.net
* web: http://yale.cs.uni-dortmund.de/
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA.
*/
package edu.udo.cs.yale.operator.learner.kernel;

import edu.udo.cs.yale.example.Attribute;
import edu.udo.cs.yale.operator.parameter.*;
import edu.udo.cs.yale.operator.performance.PerformanceVector;
import edu.udo.cs.yale.operator.performance.EstimatedPerformance;
import edu.udo.cs.mySVM.SVM.*;
import edu.udo.cs.mySVM.Kernel.Kernel;
import edu.udo.cs.mySVM.Examples.ExampleSet;
import java.util.List;

/** This learner uses the Java implementation of the support vector machine <em>mySVM</em>
* by Stefan Rüping. This learning method can be used for both regression and classification
* and provides a fast algorithm and good results for many learning tasks.
*
* @yale.reference Rueping/2000a
* @yale.reference Vapnik/98a
* @yale.index SVM
*
* @version $Id: JMySVMLearner.java,v 1.3 2004/08/27 11:57:40 ingomierswa Exp $
*/
public class JMySVMLearner extends AbstractMySVMLearner {

    /** Indicates if the SVM is used for classification learning. */
    private boolean pattern = true;

    /** Returns true iff the learner can generate a performance vector during training. */
    public boolean canEstimatePerformance() {
        return pattern && getParameterAsBoolean("xi_alpha");
    }

    /** Returns the estimated performance of this SVM. Only works for classification tasks. */
    public PerformanceVector getEstimatedPerformance() {
        if (!pattern)
            throw new RuntimeException("JMySVM-Learner: Cannot calculate leave one out estimation of error!");
        double[] estVector = ((SVMpattern) getSVM()).getXiAlphaEstimation(getKernel());
        PerformanceVector pv = new PerformanceVector();
        pv.addCriterion(new EstimatedPerformance("xialpha_error", estVector[0], 1, true));
        pv.addCriterion(new EstimatedPerformance("xialpha_precision", estVector[1], 1, false));
        pv.addCriterion(new EstimatedPerformance("xialpha_recall", estVector[2], 1, false));
        pv.setMainCriterionName("xialpha_error");
        return pv;
    }
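
    /** Creates a new {@link JMySVMModel} for the given label, example set, and kernel. */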
    public AbstractMySVMModel createSVMModel(Attribute label, ExampleSet exampleSet, Kernel kernel, int kernelType) {
        return new JMySVMModel(label, exampleSet, kernel, kernelType);
    }
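
    /**
     * Creates the actual SVM: an {@link SVMpattern} for nominal (classification) labels
     * and an {@link SVMregression} for numerical labels.
     */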
    public SVMInterface createSVM(Attribute label, Kernel kernel, ExampleSet exampleSet) {
        if (label.isNominal()) {
            this.pattern = true;
            return new SVMpattern(this, kernel, exampleSet);
        } else {
            this.pattern = false;
            return new SVMregression(this, kernel, exampleSet);
        }
    }
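
    /**
     * Adds the parameters specific to this SVM (complexity factors, epsilon insensitivity,
     * loss type, and xi-alpha estimation) to the parameter types of the superclass.
     */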
    public List getParameterTypes() {
        List types = super.getParameterTypes();
        types.add(new ParameterTypeDouble("L_pos", "A factor for the SVM complexity constant for positive examples", 0,
                Double.POSITIVE_INFINITY, 1.0d));
        types.add(new ParameterTypeDouble("L_neg", "A factor for the SVM complexity constant for negative examples", 0,
                Double.POSITIVE_INFINITY, 1.0d));
        types.add(new ParameterTypeDouble("epsilon",
                "Insensitivity constant. No loss if prediction lies this close to true value",
                0.0d, Double.POSITIVE_INFINITY, 0.0d));
        types.add(new ParameterTypeDouble("epsilon+", "Epsilon for positive deviation only",
                0.0d, Double.POSITIVE_INFINITY, 0.0d));
        types.add(new ParameterTypeDouble("epsilon-", "Epsilon for negative deviation only",
                0.0d, Double.POSITIVE_INFINITY, 0.0d));
        types.add(new ParameterTypeBoolean("balance_cost",
                "Adapts Cpos and Cneg to the relative size of the classes", false));
        types.add(new ParameterTypeBoolean("quadratic_loss_pos", "Use quadratic loss for positive deviation", false));
        types.add(new ParameterTypeBoolean("quadratic_loss_neg", "Use quadratic loss for negative deviation", false));
        types.add(new ParameterTypeBoolean("xi_alpha", "Calculate xi alpha estimation for classification tasks", false));
        return types;
    }
}
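
/*
 * Illustrative usage sketch (not part of the original YALE source): in a YALE experiment
 * this learner is referenced as an operator in the experiment XML and configured via the
 * parameter keys registered in getParameterTypes() above, plus the kernel and complexity
 * parameters inherited from AbstractMySVMLearner. The operator class name "JMySVMLearner"
 * and the surrounding experiment structure are assumptions; only the parameter keys shown
 * are defined in this class.
 *
 *     <operator name="SVM" class="JMySVMLearner">
 *         <parameter key="xi_alpha" value="true"/>
 *         <parameter key="balance_cost" value="true"/>
 *     </operator>
 *
 * With xi_alpha set to true and a nominal label, canEstimatePerformance() returns true and
 * getEstimatedPerformance() delivers the xialpha_error, xialpha_precision, and xialpha_recall
 * criteria without a separate validation run.
 */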