
📄 nd.java

📁 This code is an implementation of a classifier that reuses part of Weka's source code. The project can be imported into Eclipse and run; a minimal usage sketch is shown below.
💻 JAVA
📖 Page 1 of 2
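
As the file description notes, this is the nested-dichotomies meta classifier from Weka. The sketch below is illustrative only: it assumes a Weka 3.x jar on the classpath and a multi-class ARFF file (the name iris.arff is just a placeholder), and uses J48 as an example base classifier.

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.meta.nestedDichotomies.ND;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class NDExample {
  public static void main(String[] args) throws Exception {
    // Load a multi-class dataset (file name is only an example)
    Instances data = new Instances(new BufferedReader(new FileReader("iris.arff")));
    data.setClassIndex(data.numAttributes() - 1);

    // ND decomposes the multi-class problem into a random tree of 2-class problems,
    // each handled by the chosen base classifier
    ND nd = new ND();
    nd.setClassifier(new J48());

    // Estimate accuracy with 10-fold cross-validation
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(nd, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
  }
}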
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    ND.java
 *    Copyright (C) 2003-2005 University of Waikato
 *
 */

package weka.classifiers.meta.nestedDichotomies;

import weka.classifiers.Classifier;
import weka.classifiers.RandomizableSingleClassifierEnhancer;
import weka.classifiers.meta.FilteredClassifier;
import weka.classifiers.rules.ZeroR;
import weka.core.Capabilities;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.MakeIndicator;
import weka.filters.unsupervised.instance.RemoveWithValues;

import java.io.Serializable;
import java.util.Hashtable;
import java.util.Random;

/**
 <!-- globalinfo-start -->
 * A meta classifier for handling multi-class datasets with 2-class classifiers by building a random tree structure.<br/>
 * <br/>
 * For more info, check<br/>
 * <br/>
 * Lin Dong, Eibe Frank, Stefan Kramer: Ensembles of Balanced Nested Dichotomies for Multi-class Problems. In: PKDD, 84-95, 2005.<br/>
 * <br/>
 * Eibe Frank, Stefan Kramer: Ensembles of nested dichotomies for multi-class problems. In: Twenty-first International Conference on Machine Learning, 2004.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;inproceedings{Dong2005,
 *    author = {Lin Dong and Eibe Frank and Stefan Kramer},
 *    booktitle = {PKDD},
 *    pages = {84-95},
 *    publisher = {Springer},
 *    title = {Ensembles of Balanced Nested Dichotomies for Multi-class Problems},
 *    year = {2005}
 * }
 *
 * &#64;inproceedings{Frank2004,
 *    author = {Eibe Frank and Stefan Kramer},
 *    booktitle = {Twenty-first International Conference on Machine Learning},
 *    publisher = {ACM},
 *    title = {Ensembles of nested dichotomies for multi-class problems},
 *    year = {2004}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -S &lt;num&gt;
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -W
 *  Full name of base classifier.
 *  (default: weka.classifiers.trees.J48)</pre>
 *
 * <pre>
 * Options specific to classifier weka.classifiers.trees.J48:
 * </pre>
 *
 * <pre> -U
 *  Use unpruned tree.</pre>
 *
 * <pre> -C &lt;pruning confidence&gt;
 *  Set confidence threshold for pruning.
 *  (default 0.25)</pre>
 *
 * <pre> -M &lt;minimum number of instances&gt;
 *  Set minimum number of instances per leaf.
 *  (default 2)</pre>
 *
 * <pre> -R
 *  Use reduced error pruning.</pre>
 *
 * <pre> -N &lt;number of folds&gt;
 *  Set number of folds for reduced error
 *  pruning. One fold is used as pruning set.
 *  (default 3)</pre>
 *
 * <pre> -B
 *  Use binary splits only.</pre>
 *
 * <pre> -S
 *  Don't perform subtree raising.</pre>
 *
 * <pre> -L
 *  Do not clean up after the tree has been built.</pre>
 *
 * <pre> -A
 *  Laplace smoothing for predicted probabilities.</pre>
 *
 * <pre> -Q &lt;seed&gt;
 *  Seed for random data shuffling (default 1).</pre>
 *
 <!-- options-end -->
 *
 * @author Eibe Frank
 * @author Lin Dong
 */
public class ND
  extends RandomizableSingleClassifierEnhancer
  implements TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -6355893369855683820L;

  /**
   * a node class
   */
  protected class NDTree implements Serializable {

    /** The indices associated with this node */
    protected FastVector m_indices = null;

    /** The parent */
    protected NDTree m_parent = null;

    /** The left successor */
    protected NDTree m_left = null;

    /** The right successor */
    protected NDTree m_right = null;

    /**
     * Constructor.
     */
    protected NDTree() {

      m_indices = new FastVector(1);
      m_indices.addElement(new Integer(Integer.MAX_VALUE));
    }

    /**
     * Locates the node with the given index (depth-first traversal).
     */
    protected NDTree locateNode(int nodeIndex, int[] currentIndex) {

      if (nodeIndex == currentIndex[0]) {
        return this;
      } else if (m_left == null) {
        return null;
      } else {
        currentIndex[0]++;
        NDTree leftresult = m_left.locateNode(nodeIndex, currentIndex);
        if (leftresult != null) {
          return leftresult;
        } else {
          currentIndex[0]++;
          return m_right.locateNode(nodeIndex, currentIndex);
        }
      }
    }

    /**
     * Inserts a class index into the tree.
     *
     * @param classIndex the class index to insert
     */
    protected void insertClassIndex(int classIndex) {

      // Create new nodes
      NDTree right = new NDTree();
      if (m_left != null) {
        m_right.m_parent = right;
        m_left.m_parent = right;
        right.m_right = m_right;
        right.m_left = m_left;
      }
      m_right = right;
      m_right.m_indices = (FastVector)m_indices.copy();
      m_right.m_parent = this;
      m_left = new NDTree();
      m_left.insertClassIndexAtNode(classIndex);
      m_left.m_parent = this;

      // Propagate class Index
      propagateClassIndex(classIndex);
    }

    /**
     * Propagates class index to the root.
     *
     * @param classIndex the index to propagate to the root
     */
    protected void propagateClassIndex(int classIndex) {

      insertClassIndexAtNode(classIndex);
      if (m_parent != null) {
        m_parent.propagateClassIndex(classIndex);
      }
    }

    /**
     * Inserts the class index at a given node.
     *
     * @param classIndex the classIndex to insert
     */
    protected void insertClassIndexAtNode(int classIndex) {

      int i = 0;
      while (classIndex > ((Integer)m_indices.elementAt(i)).intValue()) {
        i++;
      }
      m_indices.insertElementAt(new Integer(classIndex), i);
    }

    /**
     * Gets the indices in an array of ints.
     *
     * @return the indices
     */
    protected int[] getIndices() {

      int[] ints = new int[m_indices.size() - 1];
      for (int i = 0; i < m_indices.size() - 1; i++) {
        ints[i] = ((Integer)m_indices.elementAt(i)).intValue();
      }
      return ints;
    }

    /**
     * Checks whether an index is in the array.
     *
     * @param index the index to check
     * @return true if the index is in the array
     */
    protected boolean contains(int index) {

      for (int i = 0; i < m_indices.size() - 1; i++) {
        if (index == ((Integer)m_indices.elementAt(i)).intValue()) {
          return true;
        }
      }
      return false;
    }

    /**
     * Returns the list of indices as a string.
     *
     * @return the indices as string
     */
    protected String getString() {

      StringBuffer string = new StringBuffer();
      for (int i = 0; i < m_indices.size() - 1; i++) {
        if (i > 0) {
          string.append(',');
        }
        string.append(((Integer)m_indices.elementAt(i)).intValue() + 1);
      }
      return string.toString();
    }

    /**
     * Unifies tree to improve hashing.
     */
    protected void unifyTree() {

      if (m_left != null) {
        if (((Integer)m_left.m_indices.elementAt(0)).intValue() >
            ((Integer)m_right.m_indices.elementAt(0)).intValue()) {
          NDTree temp = m_left;
          m_left = m_right;
          m_right = temp;
        }
        m_left.unifyTree();
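
The class comment in the listing describes ND as handling a multi-class dataset by building a random binary tree whose internal nodes are 2-class subproblems. The following standalone sketch is not part of ND.java and does not use Weka's internal classes; every name in it is illustrative. It simply shows how a random nested dichotomy over class indices can be generated: each recursive split corresponds to one binary classification task.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;

/** Illustrative only: prints a random nested dichotomy over class indices 0..k-1. */
public class NestedDichotomyDemo {

  /** Recursively splits a set of class indices into two random, non-empty subsets. */
  static void split(List<Integer> classes, String indent, Random rand) {
    System.out.println(indent + classes);
    if (classes.size() <= 1) {
      return;                                    // a single class is a leaf of the dichotomy tree
    }
    Collections.shuffle(classes, rand);          // random assignment of classes to the two branches
    int cut = 1 + rand.nextInt(classes.size() - 1);
    split(new ArrayList<>(classes.subList(0, cut)), indent + "  ", rand);
    split(new ArrayList<>(classes.subList(cut, classes.size())), indent + "  ", rand);
  }

  public static void main(String[] args) {
    List<Integer> classes = new ArrayList<>();
    for (int i = 0; i < 4; i++) {
      classes.add(i);                            // e.g. a 4-class problem
    }
    split(classes, "", new Random(1));           // each internal node is one 2-class subproblem
  }
}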
