⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 lwl.java

📁 代码是一个分类器的实现,其中使用了部分weka的源代码。可以将项目导入eclipse运行
💻 JAVA
📖 第 1 页 / 共 2 页
字号:
/* *    This program is free software; you can redistribute it and/or modify *    it under the terms of the GNU General Public License as published by *    the Free Software Foundation; either version 2 of the License, or *    (at your option) any later version. * *    This program is distributed in the hope that it will be useful, *    but WITHOUT ANY WARRANTY; without even the implied warranty of *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the *    GNU General Public License for more details. * *    You should have received a copy of the GNU General Public License *    along with this program; if not, write to the Free Software *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. *//* *    LWL.java *    Copyright (C) 1999, 2002, 2003 Len Trigg, Eibe Frank, Ashraf M. Kibriya * */package weka.classifiers.lazy;import weka.classifiers.SingleClassifierEnhancer;import weka.classifiers.UpdateableClassifier;import weka.core.Capabilities;import weka.core.Instance;import weka.core.Instances;import weka.core.LinearNN;import weka.core.NearestNeighbourSearch;import weka.core.Option;import weka.core.TechnicalInformation;import weka.core.TechnicalInformationHandler;import weka.core.Utils;import weka.core.WeightedInstancesHandler;import weka.core.Capabilities.Capability;import weka.core.TechnicalInformation.Field;import weka.core.TechnicalInformation.Type;import java.util.Enumeration;import java.util.Vector;/** <!-- globalinfo-start --> * Locally weighted learning. Uses an instance-based algorithm to assign instance weights which are then used by a specified WeightedInstancesHandler.<br/> * Can do classification (e.g. using naive Bayes) or regression (e.g. using linear regression).<br/> * <br/> * For more info, see<br/> * <br/> * Eibe Frank, Mark Hall, Bernhard Pfahringer: Locally Weighted Naive Bayes. In: 19th Conference in Uncertainty in Artificial Intelligence, 249-256, 2003.<br/> * <br/> * C. Atkeson, A. Moore, S. Schaal (1996). 
Locally weighted learning. AI Review.. * <p/> <!-- globalinfo-end --> * <!-- technical-bibtex-start --> * BibTeX: * <pre> * &#64;inproceedings{Frank2003, *    author = {Eibe Frank and Mark Hall and Bernhard Pfahringer}, *    booktitle = {19th Conference in Uncertainty in Artificial Intelligence}, *    pages = {249-256}, *    publisher = {Morgan Kaufmann}, *    title = {Locally Weighted Naive Bayes}, *    year = {2003} * } *  * &#64;article{Atkeson1996, *    author = {C. Atkeson and A. Moore and S. Schaal}, *    journal = {AI Review}, *    title = {Locally weighted learning}, *    year = {1996} * } * </pre> * <p/> <!-- technical-bibtex-end --> * <!-- options-start --> * Valid options are: <p/> *  * <pre> -A *  The nearest neighbour search algorithm to use (default: LinearNN). * </pre> *  * <pre> -K &lt;number of neighbours&gt; *  Set the number of neighbours used to set the kernel bandwidth. *  (default all)</pre> *  * <pre> -U &lt;number of weighting method&gt; *  Set the weighting kernel shape to use. 0=Linear, 1=Epanechnikov, *  2=Tricube, 3=Inverse, 4=Gaussian. *  (default 0 = Linear)</pre> *  * <pre> -D *  If set, classifier is run in debug mode and *  may output additional info to the console</pre> *  * <pre> -W *  Full name of base classifier. *  (default: weka.classifiers.trees.DecisionStump)</pre> *  * <pre>  * Options specific to classifier weka.classifiers.trees.DecisionStump: * </pre> *  * <pre> -D *  If set, classifier is run in debug mode and *  may output additional info to the console</pre> *  <!-- options-end --> * * @author Len Trigg (trigg@cs.waikato.ac.nz) * @author Eibe Frank (eibe@cs.waikato.ac.nz) * @author Ashraf M. 
Kibriya (amk14@waikato.ac.nz)
 * @version $Revision: 1.18 $ 
 */
public class LWL 
  extends SingleClassifierEnhancer
  implements UpdateableClassifier, WeightedInstancesHandler, 
             TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = 1979797405383665815L;

  /** The training instances used for classification. */
  protected Instances m_Train;
    
  /** The number of neighbours used to select the kernel bandwidth.
   *  Default -1; together with m_UseAllK this appears to select
   *  "all training instances" (see setOptions/getOptions). */
  protected int m_kNN = -1;

  /** The weighting kernel method currently selected
   *  (one of the kernel constants below; default LINEAR). */
  protected int m_WeightKernel = LINEAR;

  /** True if m_kNN should be set to all instances */
  protected boolean m_UseAllK = true;
  
  /** The nearest neighbour search algorithm to use. (Default: LinearNN) */
  protected NearestNeighbourSearch m_NNSearch = new LinearNN();
  
  /** The available kernel weighting methods
   *  (selected via the -U option / m_WeightKernel). */
  protected static final int LINEAR       = 0;
  protected static final int EPANECHNIKOV = 1;
  protected static final int TRICUBE      = 2;  
  protected static final int INVERSE      = 3;
  protected static final int GAUSS        = 4;
  protected static final int CONSTANT     = 5;

  /**
   * Returns a string describing classifier
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    // The paper references are appended so the GUI description stays
    // in sync with getTechnicalInformation().
    return 
        "Locally weighted learning. Uses an instance-based algorithm to "
      + "assign instance weights which are then used by a specified "
      + "WeightedInstancesHandler.\n"
      + "Can do classification (e.g. using naive Bayes) or regression "
      + "(e.g. using linear regression).\n\n"
      + "For more info, see\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing 
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   * 
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation 	result;
    TechnicalInformation 	additional;
    
    // Primary reference: Frank, Hall & Pfahringer (2003),
    // "Locally Weighted Naive Bayes".
    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "Eibe Frank and Mark Hall and Bernhard Pfahringer");
    result.setValue(Field.YEAR, "2003");
    result.setValue(Field.TITLE, "Locally Weighted Naive Bayes");
    result.setValue(Field.BOOKTITLE, "19th Conference in Uncertainty in Artificial Intelligence");
    result.setValue(Field.PAGES, "249-256");
    result.setValue(Field.PUBLISHER, "Morgan Kaufmann");
    
    // Additional background reference: Atkeson, Moore & Schaal (1996).
    additional = result.add(Type.ARTICLE);
    additional.setValue(Field.AUTHOR, "C. Atkeson and A. Moore and S. Schaal");
    additional.setValue(Field.YEAR, "1996");
    additional.setValue(Field.TITLE, "Locally weighted learning");
    additional.setValue(Field.JOURNAL, "AI Review");
    
    return result;
  }
  
  /**
   * Constructor. Installs the default base classifier
   * (see defaultClassifierString()).
   */
  public LWL() {
    m_Classifier = new weka.classifiers.trees.DecisionStump();
  }

  /**
   * String describing default classifier.
   * 
   * @return the default classifier classname
   */
  protected String defaultClassifierString() {
    return "weka.classifiers.trees.DecisionStump";
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {
    // Three LWL-specific options (-A, -K, -U) followed by the
    // superclass options appended below.
    Vector newVector = new Vector(3);
    newVector.addElement(new Option("\tThe nearest neighbour search " +
                                    "algorithm to use (default: LinearNN).\n",
                                    "A", 0, "-A"));
    newVector.addElement(new Option("\tSet the number of neighbours used to set"
                                    +" the kernel bandwidth.\n"
                                    +"\t(default all)",
                                    "K", 1, "-K <number of neighbours>"));
    newVector.addElement(new Option("\tSet the weighting kernel shape to use."
                                    +" 0=Linear, 1=Epanechnikov,\n"
                                    +"\t2=Tricube, 3=Inverse, 4=Gaussian.\n"
                                    +"\t(default 0 = Linear)",
                                    "U", 1,"-U <number of weighting method>"));
    
    // Append the options of the superclass (base classifier, debug, ...).
    Enumeration enu = super.listOptions();
    while (enu.hasMoreElements()) {
      newVector.addElement(enu.nextElement());
    }
    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   * 
   * <pre> -A
   *  The nearest neighbour search algorithm to use (default: LinearNN).
   * </pre>
   * 
   * <pre> -K &lt;number of neighbours&gt;
   *  Set the number of neighbours used to set the kernel bandwidth.
   *  (default all)</pre>
   * 
   * <pre> -U &lt;number of weighting method&gt;
   *  Set the weighting kernel shape to use. 0=Linear, 1=Epanechnikov,
   *  2=Tricube, 3=Inverse, 4=Gaussian.
   *  (default 0 = Linear)</pre>
   * 
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   * 
   * <pre> -W
   *  Full name of base classifier.
   *  (default: weka.classifiers.trees.DecisionStump)</pre>
   * 
   * <pre> 
   * Options specific to classifier weka.classifiers.trees.DecisionStump:
   * </pre>
   * 
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   * 
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    // -K: number of neighbours for the kernel bandwidth;
    // -1 is the documented "all instances" default.
    String knnString = Utils.getOption('K', options);
    if (knnString.length() != 0) {
      setKNN(Integer.parseInt(knnString));
    } else {
      setKNN(-1);
    }

    // -U: weighting kernel shape (one of the kernel constants).
    String weightString = Utils.getOption('U', options);
    if (weightString.length() != 0) {
      setWeightingKernel(Integer.parseInt(weightString));
    } else {
      setWeightingKernel(LINEAR);
    }
    
    // -A: nearest-neighbour search specification, e.g.
    // "weka.core.LinearNN <search options>". The first token is the
    // class name; the remaining tokens are handed to the search class
    // as its own options via Utils.forName.
    String nnSearchClass = Utils.getOption('A', options);
    if(nnSearchClass.length() != 0) {
      String nnSearchClassSpec[] = Utils.splitOptions(nnSearchClass);
      if(nnSearchClassSpec.length == 0) { 
        throw new Exception("Invalid NearestNeighbourSearch algorithm " +
                            "specification string."); 
      }
      String className = nnSearchClassSpec[0];
      // Blank out the class name so only the remaining tokens are
      // treated as options of the search algorithm itself.
      nnSearchClassSpec[0] = "";
      setNearestNeighbourSearchAlgorithm( (NearestNeighbourSearch)
                  Utils.forName( NearestNeighbourSearch.class, 
                                 className, 
                                 nnSearchClassSpec)
                                        );
    }
    else 
      this.setNearestNeighbourSearchAlgorithm(new LinearNN());
  
    // Remaining options (-D, -W and base-classifier options) are
    // handled by the superclass.
    super.setOptions(options);
  }

  /**
   * Gets the current settings of the classifier.
*   * @return an array of strings suitable for passing to setOptions   */  public String [] getOptions() {    String [] superOptions = super.getOptions();    String [] options = new String [superOptions.length + 6];    int current = 0;    options[current++] = "-U"; options[current++] = "" + getWeightingKernel();    if ( (getKNN() == 0) && m_UseAllK) {      options[current++] = "-K"; options[current++] = "-1";    }    else {      options[current++] = "-K"; options[current++] = "" + getKNN();    }    options[current++] = "-A";    options[current++] = m_NNSearch.getClass().getName()+" "+Utils.joinOptions(m_NNSearch.getOptions());     System.arraycopy(superOptions, 0, options, current,                     superOptions.length);

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -