
📄 MIBoost.java

📁 The code implements a classifier and reuses part of the Weka source code; the project can be imported into Eclipse and run. A minimal usage sketch is given below, before the listing itself.
💻 Language: Java
📖 Page 1 of 2
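Before the listing, here is a minimal sketch of how one might train and evaluate this classifier through the Weka Java API. It assumes a Weka 3.x classpath and a multi-instance ARFF file (bag-id attribute, relational "bag" attribute, class attribute); the "musk1.arff" path and the MIBoostDemo class name are placeholders, not part of the original project.

import weka.classifiers.Evaluation;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.mi.MIBoost;
import weka.core.Instances;

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Random;

public class MIBoostDemo {
  public static void main(String[] args) throws Exception {
    // Load a multi-instance dataset ("musk1.arff" is a placeholder path)
    // and use the last attribute as the class.
    Instances data = new Instances(new BufferedReader(new FileReader("musk1.arff")));
    data.setClassIndex(data.numAttributes() - 1);

    // Configure the booster: NaiveBayes as the base learner (-W),
    // 10 boosting rounds (-R), 3 discretization bins (-B).
    MIBoost booster = new MIBoost();
    booster.setClassifier(new NaiveBayes());
    booster.setMaxIterations(10);
    booster.setDiscretizeBin(3);

    // 10-fold cross-validation at the bag level.
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(booster, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
  }
}

From the command line, the options documented in the Javadoc suggest an equivalent run would look roughly like "java weka.classifiers.mi.MIBoost -t musk1.arff -R 10 -B 3 -W weka.classifiers.bayes.NaiveBayes", where -t is Weka's standard training-file option rather than one defined in this class.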
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * MIBoost.java
 * Copyright (C) 2005 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.mi;

import weka.classifiers.Classifier;
import weka.classifiers.SingleClassifierEnhancer;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.MultiInstanceCapabilitiesHandler;
import weka.core.Optimization;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Discretize;
import weka.filters.unsupervised.attribute.MultiInstanceToPropositional;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * MI AdaBoost method, considers the geometric mean of posterior of instances
 * inside a bag (arithmetic mean of log-posterior) and the expectation for a
 * bag is taken inside the loss function.<br/>
 * <br/>
 * For more information about AdaBoost, see:<br/>
 * <br/>
 * Yoav Freund, Robert E. Schapire: Experiments with a new boosting algorithm.
 * In: Thirteenth International Conference on Machine Learning, San Francisco, 148-156, 1996.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;inproceedings{Freund1996,
 *    address = {San Francisco},
 *    author = {Yoav Freund and Robert E. Schapire},
 *    booktitle = {Thirteenth International Conference on Machine Learning},
 *    pages = {148-156},
 *    publisher = {Morgan Kaufmann},
 *    title = {Experiments with a new boosting algorithm},
 *    year = {1996}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -D
 *  Turn on debugging output.</pre>
 *
 * <pre> -B &lt;num&gt;
 *  The number of bins in discretization
 *  (default 0, no discretization)</pre>
 *
 * <pre> -R &lt;num&gt;
 *  Maximum number of boost iterations.
 *  (default 10)</pre>
 *
 * <pre> -W &lt;class name&gt;
 *  Full name of classifier to boost.
 *  eg: weka.classifiers.bayes.NaiveBayes</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @author Xin Xu (xx5@cs.waikato.ac.nz)
 * @version $Revision: 1.5 $
 */
public class MIBoost
  extends SingleClassifierEnhancer
  implements OptionHandler, MultiInstanceCapabilitiesHandler,
             TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -3808427225599279539L;

  /** the models for the iterations */
  protected Classifier[] m_Models;

  /** The number of the class labels */
  protected int m_NumClasses;

  /** Class labels for each bag */
  protected int[] m_Classes;

  /** attribute names for the new dataset used to build the model */
  protected Instances m_Attributes;

  /** Number of iterations */
  private int m_NumIterations = 100;

  /** Voting weights of models */
  protected double[] m_Beta;

  /** the maximum number of boost iterations */
  protected int m_MaxIterations = 10;

  /** the number of discretization bins */
  protected int m_DiscretizeBin = 0;

  /** filter used for discretization */
  protected Discretize m_Filter = null;

  /** filter used to convert the MI dataset into a single-instance dataset */
  protected MultiInstanceToPropositional m_ConvertToSI = new MultiInstanceToPropositional();

  /**
   * Returns a string describing this classifier
   *
   * @return a description of the classifier suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "MI AdaBoost method, considers the geometric mean of posterior "
      + "of instances inside a bag (arithmetic mean of log-posterior) and "
      + "the expectation for a bag is taken inside the loss function.\n\n"
      + "For more information about AdaBoost, see:\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "Yoav Freund and Robert E. Schapire");
    result.setValue(Field.TITLE, "Experiments with a new boosting algorithm");
    result.setValue(Field.BOOKTITLE, "Thirteenth International Conference on Machine Learning");
    result.setValue(Field.YEAR, "1996");
    result.setValue(Field.PAGES, "148-156");
    result.setValue(Field.PUBLISHER, "Morgan Kaufmann");
    result.setValue(Field.ADDRESS, "San Francisco");

    return result;
  }

  /**
   * Returns an enumeration describing the available options
   *
   * @return an enumeration of all the available options
   */
  public Enumeration listOptions() {
    Vector result = new Vector();

    result.addElement(new Option(
          "\tTurn on debugging output.",
          "D", 0, "-D"));

    result.addElement(new Option(
          "\tThe number of bins in discretization\n"
          + "\t(default 0, no discretization)",
          "B", 1, "-B <num>"));

    result.addElement(new Option(
          "\tMaximum number of boost iterations.\n"
          + "\t(default 10)",
          "R", 1, "-R <num>"));

    result.addElement(new Option(
          "\tFull name of classifier to boost.\n"
          + "\teg: weka.classifiers.bayes.NaiveBayes",
          "W", 1, "-W <class name>"));

    Enumeration enu = ((OptionHandler)m_Classifier).listOptions();
    while (enu.hasMoreElements()) {
      result.addElement(enu.nextElement());
    }

    return result.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -D
   *  Turn on debugging output.</pre>
   *
   * <pre> -B &lt;num&gt;
   *  The number of bins in discretization
   *  (default 0, no discretization)</pre>
   *
   * <pre> -R &lt;num&gt;
   *  Maximum number of boost iterations.
   *  (default 10)</pre>
   *
   * <pre> -W &lt;class name&gt;
   *  Full name of classifier to boost.
   *  eg: weka.classifiers.bayes.NaiveBayes</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    setDebug(Utils.getFlag('D', options));

    String bin = Utils.getOption('B', options);
    if (bin.length() != 0) {
      setDiscretizeBin(Integer.parseInt(bin));
    } else {
      setDiscretizeBin(0);
    }

    String boostIterations = Utils.getOption('R', options);
    if (boostIterations.length() != 0) {
      setMaxIterations(Integer.parseInt(boostIterations));
    } else {
      setMaxIterations(10);
    }

    super.setOptions(options);
  }

  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    Vector        result;
    String[]      options;
    int           i;

    result  = new Vector();

    result.add("-R");
    result.add("" + getMaxIterations());

    result.add("-B");
    result.add("" + getDiscretizeBin());

    options = super.getOptions();
    for (i = 0; i < options.length; i++)
      result.add(options[i]);

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String maxIterationsTipText() {
    return "The maximum number of boost iterations.";
  }

  /**
   * Set the maximum number of boost iterations
   *
   * @param maxIterations the maximum number of boost iterations
   */
  public void setMaxIterations(int maxIterations) {
    m_MaxIterations = maxIterations;
  }

  /**
   * Get the maximum number of boost iterations
   *
   * @return the maximum number of boost iterations
   */
  public int getMaxIterations() {
    return m_MaxIterations;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String discretizeBinTipText() {
    return "The number of bins in discretization.";
  }

  /**
   * Set the number of bins in discretization
   *
   * @param bin the number of bins in discretization
   */
  public void setDiscretizeBin(int bin) {
    m_DiscretizeBin = bin;
  }

  /**
   * Get the number of bins in discretization
   *
   * @return the number of bins in discretization
   */
  public int getDiscretizeBin() {
    return m_DiscretizeBin;
  }

  private class OptEng
  // (listing truncated here; the remainder continues on page 2 of 2)
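As a reading aid for the globalInfo() description above: the bag-level posterior it refers to is the geometric mean of the per-instance posteriors, which is the same as exponentiating the arithmetic mean of their log-posteriors. The snippet below is a standalone illustration of that aggregation only (the class and method names are made up here); it is not the internal MIBoost code, whose remaining methods, including the OptEng optimization helper, appear on page 2 of the listing.

public final class BagPosteriorSketch {

  /**
   * Geometric mean of per-instance posteriors for one bag,
   * computed as exp of the arithmetic mean of log-posteriors.
   *
   * @param instancePosteriors P(class | instance) for each instance in the bag
   * @return the bag-level posterior under this aggregation
   */
  public static double bagPosterior(double[] instancePosteriors) {
    double sumLog = 0.0;
    for (double p : instancePosteriors) {
      sumLog += Math.log(p);                       // accumulate log-posteriors
    }
    return Math.exp(sumLog / instancePosteriors.length);
  }

  public static void main(String[] args) {
    double[] bag = {0.9, 0.8, 0.6};                // hypothetical per-instance posteriors
    System.out.println(bagPosterior(bag));         // geometric mean, approx. 0.756
  }
}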
