additiveregression.java

来自「Weka」· Java 代码 · 共 527 行 · 第 1/2 页

JAVA
527
字号
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    AdditiveRegression.java
 *    Copyright (C) 2000 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.meta;

import weka.classifiers.Classifier;
import weka.classifiers.IteratedSingleClassifierEnhancer;
import weka.classifiers.rules.ZeroR;
import weka.core.AdditionalMeasureProducer;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Meta classifier that enhances the performance of a regression base
 * classifier. Each iteration fits a model to the residuals left by the
 * classifier on the previous iteration. Prediction is accomplished by adding
 * the predictions of each classifier. Reducing the shrinkage (learning rate)
 * parameter helps prevent overfitting and has a smoothing effect but
 * increases the learning time.<br/>
 * <br/>
 * For more information see:<br/>
 * <br/>
 * J.H. Friedman (1999). Stochastic Gradient Boosting.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;techreport{Friedman1999,
 *    author = {J.H. Friedman},
 *    institution = {Stanford University},
 *    title = {Stochastic Gradient Boosting},
 *    year = {1999},
 *    PS = {http://www-stat.stanford.edu/\~jhf/ftp/stobst.ps}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -S
 *  Specify shrinkage rate. (default = 1.0, ie. no shrinkage)
 * </pre>
 *
 * <pre> -I &lt;num&gt;
 *  Number of iterations.
 *  (default 10)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -W
 *  Full name of base classifier.
 *  (default: weka.classifiers.trees.DecisionStump)</pre>
 *
 * <pre>
 * Options specific to classifier weka.classifiers.trees.DecisionStump:
 * </pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * @author Mark Hall (mhall@cs.waikato.ac.nz)
 * @version $Revision: 1.24 $
 */
public class AdditiveRegression
  extends IteratedSingleClassifierEnhancer
  implements OptionHandler,
             AdditionalMeasureProducer,
             WeightedInstancesHandler,
             TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -2368937577670527151L;

  /**
   * Shrinkage (Learning rate). Default = no shrinkage.
   */
  protected double m_shrinkage = 1.0;

  /** The number of successfully generated base classifiers. */
  protected int m_NumIterationsPerformed;

  /** The model for the mean */
  protected ZeroR m_zeroR;

  /** whether we have suitable data or not (if not, ZeroR model is used) */
  protected boolean m_SuitableData = true;

  /**
   * Returns a string describing this attribute evaluator
   * @return a description of the evaluator suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return " Meta classifier that enhances the performance of a regression "
      +"base classifier. Each iteration fits a model to the residuals left "
      +"by the classifier on the previous iteration. Prediction is "
      +"accomplished by adding the predictions of each classifier. "
      +"Reducing the shrinkage (learning rate) parameter helps prevent "
      +"overfitting and has a smoothing effect but increases the learning "
      +"time.\n\n"
      +"For more information see:\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation 	result;

    result = new TechnicalInformation(Type.TECHREPORT);
    result.setValue(Field.AUTHOR, "J.H. Friedman");
    result.setValue(Field.YEAR, "1999");
    result.setValue(Field.TITLE, "Stochastic Gradient Boosting");
    result.setValue(Field.INSTITUTION, "Stanford University");
    result.setValue(Field.PS, "http://www-stat.stanford.edu/~jhf/ftp/stobst.ps");

    return result;
  }

  /**
   * Default constructor specifying DecisionStump as the classifier
   */
  public AdditiveRegression() {
    this(new weka.classifiers.trees.DecisionStump());
  }

  /**
   * Constructor which takes base classifier as argument.
   *
   * @param classifier the base classifier to use
   */
  public AdditiveRegression(Classifier classifier) {
    m_Classifier = classifier;
  }

  /**
   * String describing default classifier.
   *
   * @return the default classifier classname
   */
  protected String defaultClassifierString() {

    return "weka.classifiers.trees.DecisionStump";
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(4);

    newVector.addElement(new Option(
	      "\tSpecify shrinkage rate. "
	      +"(default = 1.0, ie. no shrinkage)\n",
	      "S", 1, "-S"));

    // append the options of the superclass (iterations, debug, base classifier)
    Enumeration enu = super.listOptions();
    while (enu.hasMoreElements()) {
      newVector.addElement(enu.nextElement());
    }
    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -S
   *  Specify shrinkage rate. (default = 1.0, ie. no shrinkage)
   * </pre>
   *
   * <pre> -I &lt;num&gt;
   *  Number of iterations.
   *  (default 10)</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -W
   *  Full name of base classifier.
   *  (default: weka.classifiers.trees.DecisionStump)</pre>
   *
   * <pre>
   * Options specific to classifier weka.classifiers.trees.DecisionStump:
   * </pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    String optionString = Utils.getOption('S', options);
    if (optionString.length() != 0) {
      Double temp = Double.valueOf(optionString);
      setShrinkage(temp.doubleValue());
    }
    // remaining options (-I, -D, -W, base-classifier options) are handled upstream
    super.setOptions(options);
  }

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    String [] superOptions = super.getOptions();
    // NOTE(review): the pasted source is truncated at this point (page 1 of 2
    // of the original paste); the remainder of getOptions() and the rest of
    // the class are not visible in this chunk and have not been reconstructed.

⌨️ 快捷键说明

复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?