⭐ 虫虫下载站

📄 ridor.java

📁 This code implements a rule-based classifier and reuses part of the Weka source code. The project can be imported into Eclipse and run directly. A minimal usage sketch follows the file details below.
💻 JAVA
📖 Page 1 of 4
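The listing below exposes Weka's standard classifier entry points, buildClassifier and classifyInstance. The following is a minimal usage sketch, not part of the uploaded project: it assumes the class is available as weka.classifiers.rules.Ridor (its location in stock Weka), that weka.core.converters.ConverterUtils.DataSource is on the classpath (present in recent Weka releases), and that the ARFF path data/weather.nominal.arff is a placeholder for your own nominal-class dataset.

import weka.classifiers.rules.Ridor;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class RidorDemo {
  public static void main(String[] args) throws Exception {
    // Load an ARFF file (placeholder path) and mark the last attribute as the class.
    Instances data = DataSource.read("data/weather.nominal.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // Build the ripple-down rule learner on the full dataset.
    Ridor ridor = new Ridor();
    ridor.buildClassifier(data);

    // Print the learned rule set and classify the first training instance.
    System.out.println(ridor);
    Instance first = data.instance(0);
    double pred = ridor.classifyInstance(first);
    System.out.println("Predicted: " + data.classAttribute().value((int) pred));
  }
}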
      return splitData;
    }

    /**
     * Whether the instance is covered by this antecedent
     *
     * @param inst the instance in question
     * @return the boolean value indicating whether the instance is covered
     *         by this antecedent
     */
    public boolean isCover(Instance inst){
      boolean isCover = false;
      if(!inst.isMissing(att)){
        if(Utils.eq(inst.value(att), value))
          isCover = true;
      }
      return isCover;
    }

    /**
     * Prints this antecedent
     *
     * @return a textual description of this antecedent
     */
    public String toString() {
      return (att.name() + " = " + att.value((int)value));
    }
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // attributes
    result.enable(Capability.NOMINAL_ATTRIBUTES);
    result.enable(Capability.NUMERIC_ATTRIBUTES);
    result.enable(Capability.DATE_ATTRIBUTES);
    result.enable(Capability.MISSING_VALUES);

    // class
    result.enable(Capability.NOMINAL_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);

    return result;
  }

  /**
   * Builds a ripple-down manner rule learner.
   *
   * @param instances the training data
   * @throws Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances instances) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class
    Instances data = new Instances(instances);
    data.deleteWithMissingClass();

    int numCl = data.numClasses();
    m_Root = new Ridor_node();
    m_Class = instances.classAttribute();     // The original class label

    int index = data.classIndex();
    m_Cover = data.sumOfWeights();

    /* Create a binary attribute */
    FastVector binary_values = new FastVector(2);
    binary_values.addElement("otherClasses");
    binary_values.addElement("defClass");
    Attribute attr = new Attribute("newClass", binary_values);
    data.insertAttributeAt(attr, index);
    data.setClassIndex(index);                 // The new class label

    /* Partition the data into bags according to their original class values */
    Instances[] dataByClass = new Instances[numCl];
    for(int i=0; i < numCl; i++)
      dataByClass[i] = new Instances(data, data.numInstances()); // Empty bags
    for(int i=0; i < data.numInstances(); i++){ // Partitioning
      Instance inst = data.instance(i);
      inst.setClassValue(0);           // Set new class value to be 0
      dataByClass[(int)inst.value(index+1)].add(inst);
    }

    for(int i=0; i < numCl; i++)
      dataByClass[i].deleteAttributeAt(index+1);   // Delete original class

    m_Root.findRules(dataByClass, 0);
  }

  /**
   * Classify the test instance with the rule learner
   *
   * @param datum the instance to be classified
   * @return the classification
   */
  public double classifyInstance(Instance datum){
    return classify(m_Root, datum);
  }

  /**
   * Classify the test instance with one node of Ridor
   *
   * @param node the node of Ridor to classify the test instance
   * @param datum the instance to be classified
   * @return the classification
   */
  private double classify(Ridor_node node, Instance datum){
    double classValue = node.getDefClass();
    RidorRule[] rules = node.getRules();

    if(rules != null){
      Ridor_node[] excepts = node.getExcepts();
      for(int i=0; i < excepts.length; i++){
        if(rules[i].isCover(datum)){
          classValue = classify(excepts[i], datum);
          break;
        }
      }
    }

    return classValue;
  }

  /**
   * Returns an enumeration describing the available options.
   * Valid options are: <p>
   *
   * -F number <br>
   * Set number of folds for reduced error pruning. One fold is
   * used as the pruning set. (Default: 3) <p>
   *
   * -S number <br>
   * Set number of shuffles for randomization. (Default: 10) <p>
   *
   * -A <br>
   * Set flag of whether use the error rate of all the data to select
   * the default class in each step. If not set, the learner will only use
   * the error rate in the pruning data <p>
   *
   * -M <br>
   * Set flag of whether use the majority class as the default class
   * in each step instead of choosing default class based on the error rate
   * (if the flag is not set) <p>
   *
   * -N number <br>
   * Set the minimal weights of instances within a split.
   * (Default: 2) <p>
   *
   * @return an enumeration of all the available options
   */
  public Enumeration listOptions() {
    Vector newVector = new Vector(5);

    newVector.addElement(new Option("\tSet number of folds for IREP\n" +
                                    "\tOne fold is used as pruning set.\n" +
                                    "\t(default 3)", "F", 1, "-F <number of folds>"));
    newVector.addElement(new Option("\tSet number of shuffles to randomize\n" +
                                    "\tthe data in order to get better rule.\n" +
                                    "\t(default 10)", "S", 1, "-S <number of shuffles>"));
    newVector.addElement(new Option("\tSet flag of whether use the error rate \n" +
                                    "\tof all the data to select the default class\n" +
                                    "\tin each step. If not set, the learner will only use" +
                                    "\tthe error rate in the pruning data", "A", 0, "-A"));
    newVector.addElement(new Option("\t Set flag of whether use the majority class as\n" +
                                    "\tthe default class in each step instead of \n" +
                                    "\tchoosing default class based on the error rate\n" +
                                    "\t(if the flag is not set)", "M", 0, "-M"));
    newVector.addElement(new Option("\tSet the minimal weights of instances\n" +
                                    "\twithin a split.\n" +
                                    "\t(default 2.0)", "N", 1, "-N <min. weights>"));
    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -F &lt;number of folds&gt;
   *  Set number of folds for IREP
   *  One fold is used as pruning set.
   *  (default 3)</pre>
   *
   * <pre> -S &lt;number of shuffles&gt;
   *  Set number of shuffles to randomize
   *  the data in order to get better rule.
   *  (default 10)</pre>
   *
   * <pre> -A
   *  Set flag of whether use the error rate
   *  of all the data to select the default class
   *  in each step. If not set, the learner will only use the error rate in the pruning data</pre>
   *
   * <pre> -M
   *  Set flag of whether use the majority class as
   *  the default class in each step instead of
   *  choosing default class based on the error rate
   *  (if the flag is not set)</pre>
   *
   * <pre> -N &lt;min. weights&gt;
   *  Set the minimal weights of instances
   *  within a split.
   *  (default 2.0)</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    String numFoldsString = Utils.getOption('F', options);
    if (numFoldsString.length() != 0)
      m_Folds = Integer.parseInt(numFoldsString);
    else
      m_Folds = 3;

    String numShuffleString = Utils.getOption('S', options);
    if (numShuffleString.length() != 0)
      m_Shuffle = Integer.parseInt(numShuffleString);
    else
      m_Shuffle = 1;

    String seedString = Utils.getOption('s', options);
    if (seedString.length() != 0)
      m_Seed = Integer.parseInt(seedString);
    else
      m_Seed = 1;

    String minNoString = Utils.getOption('N', options);
    if (minNoString.length() != 0)
      m_MinNo = Double.parseDouble(minNoString);
    else
      m_MinNo = 2.0;

    m_IsAllErr = Utils.getFlag('A', options);
    m_IsMajority = Utils.getFlag('M', options);
  }

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {

    String[] options = new String[8];
    int current = 0;
    options[current++] = "-F"; options[current++] = "" + m_Folds;
    options[current++] = "-S"; options[current++] = "" + m_Shuffle;
    options[current++] = "-N"; options[current++] = "" + m_MinNo;

    if(m_IsAllErr)
      options[current++] = "-A";
    if(m_IsMajority)
      options[current++] = "-M";

    while (current < options.length)
      options[current++] = "";

    return options;
  }

  /** Set and get members for parameters */

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String foldsTipText() {
    return "Determines the amount of data used for pruning. One fold is used for "
      + "pruning, the rest for growing the rules.";
  }

  public void setFolds(int fold){ m_Folds = fold; }
  public int getFolds(){ return m_Folds; }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String shuffleTipText() {
    return "Determines how often the data is shuffled before a rule "
      + "is chosen. If > 1, a rule is learned multiple times and the "
      + "most accurate rule is chosen.";
  }

  public void setShuffle(int sh){ m_Shuffle = sh; }
  public int getShuffle(){ return m_Shuffle; }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String seedTipText() {
    return "The seed used for randomizing the data.";
  }

  public void setSeed(int s){ m_Seed = s; }
  public int getSeed(){ return m_Seed; }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String wholeDataErrTipText() {
    return "Whether worth of rule is computed based on all the data "
      + "or just based on data covered by rule.";
  }

  public void setWholeDataErr(boolean a){ m_IsAllErr = a; }
  public boolean getWholeDataErr(){ return m_IsAllErr; }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String majorityClassTipText() {
    return "Whether the majority class is used as default.";
  }

  public void setMajorityClass(boolean m){ m_IsMajority = m; }
  public boolean getMajorityClass(){ return m_IsMajority; }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String minNoTipText() {
    return "The minimum total weight of the instances in a rule.";
  }

  public void setMinNo(double m){ m_MinNo = m; }
  public double getMinNo(){ return m_MinNo; }

  /**
   * Returns an enumeration of the additional measure names
   * @return an enumeration of the measure names
   */
  public Enumeration enumerateMeasures() {
    Vector newVector = new Vector(1);
    newVector.addElement("measureNumRules");
    return newVector.elements();
  }

  /**
   * Returns the value of the named measure
   * @param additionalMeasureName the name of the measure to query for its value
   * @return the value of the named measure
   * @throws IllegalArgumentException if the named measure is not supported
   */
  public double getMeasure(String additionalMeasureName) {
    if (additionalMeasureName.compareToIgnoreCase("measureNumRules") == 0)
      return numRules();
    else
      throw new IllegalArgumentException(additionalMeasureName + " not supported (Ripple down rule learner)");
  }

  /**
   * Measure the number of rules in total in the model
   *
   * @return the number of rules
   */
  private double numRules(){
    int size = 0;
    if(m_Root != null)
      size = m_Root.size();

    return (double)(size + 1); // Add the default rule
  }

  /**
   * Prints all the rules of the rule learner.
   *
   * @return a textual description of the classifier
   */
  public String toString() {
    if (m_Root == null)
      return "RIpple DOwn Rule Learner(Ridor): No model built yet.";

    return ("RIpple DOwn Rule Learner(Ridor) rules\n" +
            "--------------------------------------\n\n" +
            m_Root.toString() +
            "\nTotal number of rules (incl. the default rule): " + (int)numRules());
  }

  /**
   * Main method.
   *
   * @param args the options for the classifier
   */
  public static void main(String[] args) {
    runClassifier(new Ridor(), args);
  }
}
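The listing documents the command-line flags in listOptions() and setOptions(): -F (folds for IREP pruning), -S (shuffles), -N (minimum instance weight per split), -A (use the error rate on all data to pick the default class), and -M (use the majority class as default). The following sketch, not part of the uploaded project, shows one way to configure the classifier programmatically; the flag values are simply the defaults documented above, chosen for illustration, and the class is again assumed to be weka.classifiers.rules.Ridor.

import weka.classifiers.rules.Ridor;

public class RidorOptionsDemo {
  public static void main(String[] args) throws Exception {
    Ridor ridor = new Ridor();

    // Mirror the command-line flags documented in listOptions().
    ridor.setOptions(new String[] {"-F", "3", "-S", "10", "-N", "2.0", "-A"});

    // The equivalent bean-style setters shown in the listing are also available.
    ridor.setFolds(3);
    ridor.setShuffle(10);
    ridor.setMinNo(2.0);
    ridor.setWholeDataErr(true);

    // getOptions() reports the current configuration in the same flag form
    // (unused slots in the array are padded with empty strings).
    System.out.println(String.join(" ", ridor.getOptions()));
  }
}

The same flags can also be passed on the command line via the main() method at the end of the listing, which delegates to Weka's standard runClassifier harness.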
