
📄 nd.java

📁 The code implements a classifier and reuses part of the Weka source code. The project can be imported into Eclipse and run; a short usage sketch follows the listing below.
💻 JAVA
📖 Page 1 of 2
        m_right.unifyTree();
      }
    }

    /**
     * Returns a description of the tree rooted at this node.
     *
     * @param text the buffer to add the node to
     * @param id the node id
     * @param level the level of the tree
     */
    protected void toString(StringBuffer text, int[] id, int level) {
      for (int i = 0; i < level; i++) {
        text.append("   | ");
      }
      text.append(id[0] + ": " + getString() + "\n");
      if (m_left != null) {
        id[0]++;
        m_left.toString(text, id, level + 1);
        id[0]++;
        m_right.toString(text, id, level + 1);
      }
    }
  }

  /** The tree of classes */
  protected NDTree m_ndtree = null;

  /** The hashtable containing all the classifiers */
  protected Hashtable m_classifiers = null;

  /** Is Hashtable given from END? */
  protected boolean m_hashtablegiven = false;

  /**
   * Constructor.
   */
  public ND() {

    m_Classifier = new weka.classifiers.trees.J48();
  }

  /**
   * String describing default classifier.
   *
   * @return the default classifier classname
   */
  protected String defaultClassifierString() {

    return "weka.classifiers.trees.J48";
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;
    TechnicalInformation additional;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "Lin Dong and Eibe Frank and Stefan Kramer");
    result.setValue(Field.TITLE, "Ensembles of Balanced Nested Dichotomies for Multi-class Problems");
    result.setValue(Field.BOOKTITLE, "PKDD");
    result.setValue(Field.YEAR, "2005");
    result.setValue(Field.PAGES, "84-95");
    result.setValue(Field.PUBLISHER, "Springer");

    additional = result.add(Type.INPROCEEDINGS);
    additional.setValue(Field.AUTHOR, "Eibe Frank and Stefan Kramer");
    additional.setValue(Field.TITLE, "Ensembles of nested dichotomies for multi-class problems");
    additional.setValue(Field.BOOKTITLE, "Twenty-first International Conference on Machine Learning");
    additional.setValue(Field.YEAR, "2004");
    additional.setValue(Field.PUBLISHER, "ACM");

    return result;
  }

  /**
   * Set hashtable from END.
   *
   * @param table the hashtable to use
   */
  public void setHashtable(Hashtable table) {

    m_hashtablegiven = true;
    m_classifiers = table;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // class
    result.disableAllClasses();
    result.enable(Capability.NOMINAL_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);

    // instances
    result.setMinimumNumberInstances(1);

    return result;
  }

  /**
   * Builds the classifier.
   *
   * @param data the data to train the classifier with
   * @throws Exception if anything goes wrong
   */
  public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    Random random = data.getRandomNumberGenerator(m_Seed);

    if (!m_hashtablegiven) {
      m_classifiers = new Hashtable();
    }

    // Generate random class hierarchy
    int[] indices = new int[data.numClasses()];
    for (int i = 0; i < indices.length; i++) {
      indices[i] = i;
    }

    // Randomize list of class indices
    for (int i = indices.length - 1; i > 0; i--) {
      int help = indices[i];
      int index = random.nextInt(i + 1);
      indices[i] = indices[index];
      indices[index] = help;
    }

    // Insert random class index at randomly chosen node
    m_ndtree = new NDTree();
    m_ndtree.insertClassIndexAtNode(indices[0]);
    for (int i = 1; i < indices.length; i++) {
      int nodeIndex = random.nextInt(2 * i - 1);
      NDTree node = m_ndtree.locateNode(nodeIndex, new int[1]);
      node.insertClassIndex(indices[i]);
    }
    m_ndtree.unifyTree();

    // Build classifiers
    buildClassifierForNode(m_ndtree, data);
  }

  /**
   * Builds the classifier for one node.
   *
   * @param node the node to build the classifier for
   * @param data the data to work with
   * @throws Exception if anything goes wrong
   */
  public void buildClassifierForNode(NDTree node, Instances data) throws Exception {

    // Are we at a leaf node ?
    if (node.m_left != null) {

      // Create classifier
      MakeIndicator filter = new MakeIndicator();
      filter.setAttributeIndex("" + (data.classIndex() + 1));
      filter.setValueIndices(node.m_right.getString());
      filter.setNumeric(false);
      filter.setInputFormat(data);
      FilteredClassifier classifier = new FilteredClassifier();
      if (data.numInstances() > 0) {
        classifier.setClassifier(Classifier.makeCopies(m_Classifier, 1)[0]);
      } else {
        classifier.setClassifier(new ZeroR());
      }
      classifier.setFilter(filter);

      if (!m_classifiers.containsKey(node.m_left.getString() + "|" + node.m_right.getString())) {
        classifier.buildClassifier(data);
        m_classifiers.put(node.m_left.getString() + "|" + node.m_right.getString(), classifier);
      } else {
        classifier = (FilteredClassifier) m_classifiers.get(node.m_left.getString() + "|" +
                                                            node.m_right.getString());
      }

      // Generate successors
      if (node.m_left.m_left != null) {
        RemoveWithValues rwv = new RemoveWithValues();
        rwv.setInvertSelection(true);
        rwv.setNominalIndices(node.m_left.getString());
        rwv.setAttributeIndex("" + (data.classIndex() + 1));
        rwv.setInputFormat(data);
        Instances firstSubset = Filter.useFilter(data, rwv);
        buildClassifierForNode(node.m_left, firstSubset);
      }
      if (node.m_right.m_left != null) {
        RemoveWithValues rwv = new RemoveWithValues();
        rwv.setInvertSelection(true);
        rwv.setNominalIndices(node.m_right.getString());
        rwv.setAttributeIndex("" + (data.classIndex() + 1));
        rwv.setInputFormat(data);
        Instances secondSubset = Filter.useFilter(data, rwv);
        buildClassifierForNode(node.m_right, secondSubset);
      }
    }
  }

  /**
   * Predicts the class distribution for a given instance
   *
   * @param inst the (multi-class) instance to be classified
   * @return the class distribution
   * @throws Exception if computing fails
   */
  public double[] distributionForInstance(Instance inst) throws Exception {

    return distributionForInstance(inst, m_ndtree);
  }

  /**
   * Predicts the class distribution for a given instance
   *
   * @param inst the (multi-class) instance to be classified
   * @param node the node to get the distribution for
   * @return the class distribution
   * @throws Exception if computing fails
   */
  protected double[] distributionForInstance(Instance inst, NDTree node) throws Exception {

    double[] newDist = new double[inst.numClasses()];
    if (node.m_left == null) {
      newDist[node.getIndices()[0]] = 1.0;
      return newDist;
    } else {
      Classifier classifier = (Classifier) m_classifiers.get(node.m_left.getString() + "|" +
                                                             node.m_right.getString());
      double[] leftDist = distributionForInstance(inst, node.m_left);
      double[] rightDist = distributionForInstance(inst, node.m_right);
      double[] dist = classifier.distributionForInstance(inst);
      for (int i = 0; i < inst.numClasses(); i++) {
        if (node.m_right.contains(i)) {
          newDist[i] = dist[1] * rightDist[i];
        } else {
          newDist[i] = dist[0] * leftDist[i];
        }
      }
      return newDist;
    }
  }

  /**
   * Outputs the classifier as a string.
   *
   * @return a string representation of the classifier
   */
  public String toString() {

    if (m_classifiers == null) {
      return "ND: No model built yet.";
    }
    StringBuffer text = new StringBuffer();
    text.append("ND\n\n");
    m_ndtree.toString(text, new int[1], 0);

    return text.toString();
  }

  /**
   * @return a description of the classifier suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return
        "A meta classifier for handling multi-class datasets with 2-class "
      + "classifiers by building a random tree structure.\n\n"
      + "For more info, check\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Main method for testing this class.
   *
   * @param argv the options
   */
  public static void main(String [] argv) {
    runClassifier(new ND(), argv);
  }
}
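For reference, here is a minimal usage sketch showing how the classifier above might be trained and queried through the Weka API once the project compiles. It is an illustration, not part of nd.java: it assumes weka.jar (a release that provides ConverterUtils.DataSource) and the compiled ND class are both on the classpath, the import for ND itself is omitted because its package declaration is on the part of the file not shown here, and the NDDemo class name and the "iris.arff" path are placeholders.

import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

// Hypothetical demo class; NDDemo and "iris.arff" are placeholders.
public class NDDemo {

  public static void main(String[] args) throws Exception {
    // Load a dataset with a nominal class attribute and use the last
    // attribute as the class, as Weka's tools conventionally do.
    Instances data = DataSource.read("iris.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // Train the nested-dichotomies classifier; buildClassifier() draws a
    // random class hierarchy and fits a J48-based binary model at each
    // internal node (ZeroR when a class subset ends up empty).
    ND nd = new ND();
    nd.buildClassifier(data);

    // toString() prints the random class tree that was built.
    System.out.println(nd);

    // Query the multi-class distribution for the first training instance.
    Instance first = data.instance(0);
    double[] dist = nd.distributionForInstance(first);
    for (int i = 0; i < dist.length; i++) {
      System.out.println(data.classAttribute().value(i) + ": " + dist[i]);
    }
  }
}

Since main() delegates to runClassifier(new ND(), argv), the class can also be evaluated from the command line with Weka's standard options, for example passing a training ARFF file via -t. Internally, distributionForInstance recombines the binary models recursively: for three classes where the root separates {a} from {b, c}, the probability assigned to b is the root model's probability for the {b, c} branch multiplied by the child model's probability for b within {b, c}, so the resulting class probabilities still sum to one.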
