📄 MINND.java

📁 This code implements a classifier, reusing part of the WEKA source code. The project can be imported into Eclipse and run.
💻 JAVA
📖 Page 1 of 3
    // The first K nearest neighbours' predictions
    double[] predict = new double[m_NumClasses];
    for (int h = 0; h < predict.length; h++)
      predict[h] = 0;

    ex = cleanse(ex);

    if (ex.relationalValue(1).numInstances() == 0) {
      if (getDebug())
        System.out.println("???Whole exemplar falls into ambiguous area!");
      return 1.0; // Bias towards positive class
    }

    double[] mean = new double[m_Dimension];
    for (int i = 0; i < m_Dimension; i++)
      mean[i] = ex.relationalValue(1).meanOrMode(i);

    // Avoid zero sigma
    for (int h = 0; h < var.length; h++) {
      if (Utils.eq(var[h], 0.0))
        var[h] = m_ZERO;
    }

    // Kullback Leibler distance from the test exemplar to each training
    // exemplar (infinite if no valid statistics exist for it)
    for (int i = 0; i < m_Class.length; i++) {
      if (m_ValidM[i] != null)
        kullback[i] = kullback(mean, m_ValidM[i], var, m_Variance[i], i);
      else
        kullback[i] = Double.POSITIVE_INFINITY;
    }

    // Let the K nearest neighbours vote, weighted by m_Weights
    for (int j = 0; j < m_Neighbour; j++) {
      int pos = Utils.minIndex(kullback);
      predict[(int) m_Class[pos]] += m_Weights[pos];
      kullback[pos] = Double.POSITIVE_INFINITY;
    }

    if (getDebug())
      System.out.println("???There are still some unambiguous instances in this exemplar! Predicted as: "
          + Utils.maxIndex(predict));

    return (double) Utils.maxIndex(predict);
  }

  /**
   * Cleanse the given exemplar according to the valid and noise data
   * statistics.
   *
   * @param before the given exemplar
   * @return the processed exemplar
   * @throws Exception if the returned exemplar is wrong
   */
  public Instance cleanse(Instance before) throws Exception {
    Instances insts = before.relationalValue(1).stringFreeStructure();
    Instance after = new Instance(before.numAttributes());
    after.setDataset(m_Attributes);

    for (int g = 0; g < before.relationalValue(1).numInstances(); g++) {
      Instance datum = before.relationalValue(1).instance(g);
      double[] minNoiDists = new double[m_Choose];
      double[] minValDists = new double[m_Choose];
      int noiseCount = 0, validCount = 0;
      double[] nDist = new double[m_Mean.length];
      double[] vDist = new double[m_Mean.length];

      // Distance from this instance to every valid/noise distribution
      for (int h = 0; h < m_Mean.length; h++) {
        if (m_ValidM[h] == null)
          vDist[h] = Double.POSITIVE_INFINITY;
        else
          vDist[h] = distance(datum, m_ValidM[h], m_ValidV[h], h);

        if (m_NoiseM[h] == null)
          nDist[h] = Double.POSITIVE_INFINITY;
        else
          nDist[h] = distance(datum, m_NoiseM[h], m_NoiseV[h], h);
      }

      // Pick the m_Choose smallest valid and noise distances
      for (int k = 0; k < m_Choose; k++) {
        int pos = Utils.minIndex(vDist);
        minValDists[k] = vDist[pos];
        vDist[pos] = Double.POSITIVE_INFINITY;
        pos = Utils.minIndex(nDist);
        minNoiDists[k] = nDist[pos];
        nDist[pos] = Double.POSITIVE_INFINITY;
      }

      // Keep the instance only if valid neighbours at least match
      // noise neighbours among the m_Choose closest
      int x = 0, y = 0;
      while ((x + y) < m_Choose) {
        if (minValDists[x] <= minNoiDists[y]) {
          validCount++;
          x++;
        } else {
          noiseCount++;
          y++;
        }
      }

      if (x >= y)
        insts.add(datum);
    }

    after.setValue(0, before.value(0));
    after.setValue(1, after.attribute(1).addRelation(insts));
    after.setValue(2, before.value(2));

    return after;
  }

  /**
   * This function calculates the Kullback Leibler distance between
   * two normal distributions. This distance is always positive.
   * Kullback Leibler distance = integral{f(X)ln(f(X)/g(X))}
   * Note that X is a vector. Since we assume dimensions are independent,
   * f(X) (and likewise g(X)) is actually the product of the normal
   * density functions of each dimension.
   * Also note that it should be log2 instead of ln in the formula, but
   * we use ln simply for computational convenience.
   *
   * The result is as follows, supposing there are P dimensions, f(X)
   * is the first distribution and g(X) is the second:
   * Kullback = sum[1..P](ln(SIGMA2/SIGMA1)) +
   *            sum[1..P](SIGMA1^2 / (2*(SIGMA2^2))) +
   *            sum[1..P]((MU1-MU2)^2 / (2*(SIGMA2^2))) -
   *            P/2
   *
   * @param mu1 mu of the first normal distribution
   * @param mu2 mu of the second normal distribution
   * @param var1 variance (SIGMA^2) of the first normal distribution
   * @param var2 variance (SIGMA^2) of the second normal distribution
   * @param pos the row of m_Change used to weight the mean-difference term
   * @return the Kullback distance of the two distributions
   */
  public double kullback(double[] mu1, double[] mu2,
      double[] var1, double[] var2, int pos) {
    int p = mu1.length;
    double result = 0;

    for (int y = 0; y < p; y++) {
      if ((Utils.gr(var1[y], 0)) && (Utils.gr(var2[y], 0))) {
        result +=
          ((Math.log(Math.sqrt(var2[y] / var1[y]))) +
           (var1[y] / (2.0 * var2[y])) +
           (m_Change[pos][y] * (mu1[y] - mu2[y]) * (mu1[y] - mu2[y]) / (2.0 * var2[y])) -
           0.5);
      }
    }

    return result;
  }

  /**
   * Returns an enumeration describing the available options
   *
   * @return an enumeration of all the available options
   */
  public Enumeration listOptions() {
    Vector result = new Vector();

    result.addElement(new Option(
        "\tSet number of nearest neighbour for prediction\n"
        + "\t(default 1)",
        "K", 1, "-K <number of neighbours>"));

    result.addElement(new Option(
        "\tSet number of nearest neighbour for cleansing the training data\n"
        + "\t(default 1)",
        "S", 1, "-S <number of neighbours>"));

    result.addElement(new Option(
        "\tSet number of nearest neighbour for cleansing the testing data\n"
        + "\t(default 1)",
        "E", 1, "-E <number of neighbours>"));

    return result.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -K &lt;number of neighbours&gt;
   *  Set number of nearest neighbour for prediction
   *  (default 1)</pre>
   *
   * <pre> -S &lt;number of neighbours&gt;
   *  Set number of nearest neighbour for cleansing the training data
   *  (default 1)</pre>
   *
   * <pre> -E &lt;number of neighbours&gt;
   *  Set number of nearest neighbour for cleansing the testing data
   *  (default 1)</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    setDebug(Utils.getFlag('D', options));

    String numNeighbourString = Utils.getOption('K', options);
    if (numNeighbourString.length() != 0)
      setNumNeighbours(Integer.parseInt(numNeighbourString));
    else
      setNumNeighbours(1);

    numNeighbourString = Utils.getOption('S', options);
    if (numNeighbourString.length() != 0)
      setNumTrainingNoises(Integer.parseInt(numNeighbourString));
    else
      setNumTrainingNoises(1);

    numNeighbourString = Utils.getOption('E', options);
    if (numNeighbourString.length() != 0)
      setNumTestingNoises(Integer.parseInt(numNeighbourString));
    else
      setNumTestingNoises(1);
  }

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    Vector result = new Vector();

    if (getDebug())
      result.add("-D");

    result.add("-K");
    result.add("" + getNumNeighbours());

    result.add("-S");
    result.add("" + getNumTrainingNoises());

    result.add("-E");
    result.add("" + getNumTestingNoises());

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String numNeighboursTipText() {
    return "The number of nearest neighbours used to estimate the class prediction of test bags.";
  }

  /**
   * Sets the number of nearest neighbours used to estimate
   * the class prediction of test bags
   *
   * @param numNeighbour the number of neighbours
   */
  public void setNumNeighbours(int numNeighbour) {
    m_Neighbour = numNeighbour;
  }

  /**
   * Returns the number of nearest neighbours used to estimate
   * the class prediction of test bags
   *
   * @return the number of neighbours
   */
  public int getNumNeighbours() {
    return m_Neighbour;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String numTrainingNoisesTipText() {
    return "The number of nearest neighbour instances in the selection of noises in the training data.";
  }

  /**
   * Sets the number of nearest neighbour instances in the
   * selection of noises in the training data
   *
   * @param numTraining the number of noises in training data
   */
  public void setNumTrainingNoises(int numTraining) {
    m_Select = numTraining;
  }

  /**
   * Returns the number of nearest neighbour instances in the
   * selection of noises in the training data
   *
   * @return the number of noises in training data
   */
  public int getNumTrainingNoises() {
    return m_Select;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String numTestingNoisesTipText() {
    return "The number of nearest neighbour instances in the selection of noises in the test data.";
  }

  /**
   * Returns the number of nearest neighbour instances in the
   * selection of noises in the test data
   *
   * @return the number of noises in test data
   */
  public int getNumTestingNoises() {
    return m_Choose;
  }

  /**
   * Sets the number of nearest neighbour instances in the
   * selection of noises in the test data
   *
   * @param numTesting the number of noises in test data
   */
  public void setNumTestingNoises(int numTesting) {
    m_Choose = numTesting;
  }

  /**
   * Main method for testing.
   *
   * @param args the options for the classifier
   */
  public static void main(String[] args) {
    runClassifier(new MINND(), args);
  }
}
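The main method hands control to WEKA's runClassifier hook, so once the project is compiled the classifier can be driven from the command line in the usual WEKA fashion, e.g. java weka.classifiers.mi.MINND -K 3 -t musk1.arff in stock WEKA (the dataset path is illustrative). For programmatic use, a minimal sketch along the following lines should work; it assumes weka.jar and this class are on the classpath, and the class name MINNDDemo and file name musk1.arff are hypothetical:

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Random;

import weka.classifiers.Evaluation;
import weka.core.Instances;
// Stock WEKA ships this class as weka.classifiers.mi.MINND; adjust this
// import to wherever MINND lives in this project.
import weka.classifiers.mi.MINND;

public class MINNDDemo {
  public static void main(String[] args) throws Exception {
    // Load a multi-instance dataset ("musk1.arff" is a hypothetical path;
    // the bag attribute must be relational, as MINND expects).
    Instances data = new Instances(new BufferedReader(new FileReader("musk1.arff")));
    data.setClassIndex(data.numAttributes() - 1);

    MINND classifier = new MINND();
    classifier.setNumNeighbours(3);     // -K: neighbours used for prediction
    classifier.setNumTrainingNoises(1); // -S: neighbours for cleansing the training data
    classifier.setNumTestingNoises(1);  // -E: neighbours for cleansing the test data

    // 10-fold cross-validation with a fixed seed for reproducibility
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(classifier, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
  }
}

Setting the options through the setters mirrors the -K/-S/-E flags parsed in setOptions above, so the two invocation styles should behave identically.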
