📄 classifiersubseteval.java

📁 Source code of a data mining system
💻 JAVA
    if (data.checkForStringAttributes()) {
      throw new Exception("Can't handle string attributes!");
    }

    m_trainingInstances = data;
    m_classIndex = m_trainingInstances.classIndex();
    m_numAttribs = m_trainingInstances.numAttributes();
    m_numInstances = m_trainingInstances.numInstances();

    // load the testing data
    if (!m_useTraining &&
        (!getHoldOutFile().getPath().startsWith("Click to set"))) {
      java.io.Reader r = new java.io.BufferedReader(
                         new java.io.FileReader(getHoldOutFile().getPath()));
      m_holdOutInstances = new Instances(r);
      r.close();
      m_holdOutInstances.setClassIndex(m_trainingInstances.classIndex());
      if (m_trainingInstances.equalHeaders(m_holdOutInstances) == false) {
        throw new Exception("Hold out/test set is not compatible with "
                            + "training data.");
      }
    }
  }

  /**
   * Evaluates a subset of attributes
   *
   * @param subset a bitset representing the attribute subset to be
   * evaluated
   * @exception Exception if the subset could not be evaluated
   */
  public double evaluateSubset (BitSet subset)
    throws Exception
  {
    int i,j;
    double errorRate = 0;
    int numAttributes = 0;
    Instances trainCopy=null;
    Instances testCopy=null;

    AttributeFilter delTransform = new AttributeFilter();
    delTransform.setInvertSelection(true);
    // copy the training instances
    trainCopy = new Instances(m_trainingInstances);

    if (!m_useTraining) {
      if (m_holdOutInstances == null) {
	throw new Exception("Must specify a set of hold out/test instances "
			    +"with -H");
      }
      // copy the test instances
      testCopy = new Instances(m_holdOutInstances);
    }

    // count attributes set in the BitSet
    for (i = 0; i < m_numAttribs; i++) {
      if (subset.get(i)) {
        numAttributes++;
      }
    }

    // set up an array of attribute indexes for the filter (+1 for the class)
    int[] featArray = new int[numAttributes + 1];

    for (i = 0, j = 0; i < m_numAttribs; i++) {
      if (subset.get(i)) {
        featArray[j++] = i;
      }
    }

    featArray[j] = m_classIndex;
    delTransform.setAttributeIndicesArray(featArray);
    delTransform.setInputFormat(trainCopy);
    trainCopy = Filter.useFilter(trainCopy, delTransform);
    if (!m_useTraining) {
      testCopy = Filter.useFilter(testCopy, delTransform);
    }

    // build the classifier
    m_Classifier.buildClassifier(trainCopy);

    m_Evaluation = new Evaluation(trainCopy);
    if (!m_useTraining) {
      m_Evaluation.evaluateModel(m_Classifier, testCopy);
    } else {
      m_Evaluation.evaluateModel(m_Classifier, trainCopy);
    }

    if (m_trainingInstances.classAttribute().isNominal()) {
      errorRate = m_Evaluation.errorRate();
    } else {
      errorRate = m_Evaluation.meanAbsoluteError();
    }

    // return the negative of the error rate as search methods need to
    // maximize something
    return -errorRate;
  }
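
  /* Illustrative sketch (not part of the original source): a wrapper-style
   * search such as weka.attributeSelection.BestFirst drives this method by
   * proposing candidate subsets and keeping the one with the highest merit,
   * roughly:
   *
   *   BitSet candidate = new BitSet(numAttribs);       // hypothetical names
   *   candidate.set(0);
   *   candidate.set(3);
   *   double merit = eval.evaluateSubset(candidate);   // equals -errorRate
   *
   * Negating the error keeps the convention that a larger merit means a
   * better subset.
   */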

  /**
   * Evaluates a subset of attributes with respect to a set of instances.
   * Calling this function overrides any test/hold out instances set from
   * setHoldOutFile.
   * @param subset a bitset representing the attribute subset to be
   * evaluated
   * @param holdOut a set of instances (possibly separate and distinct
   * from those used to build/train the evaluator) with which to
   * evaluate the merit of the subset
   * @return the "merit" of the subset on the holdOut data
   * @exception Exception if the subset cannot be evaluated
   */
  public double evaluateSubset(BitSet subset, Instances holdOut)
    throws Exception {
    int i,j;
    double errorRate;
    int numAttributes = 0;
    Instances trainCopy=null;
    Instances testCopy=null;

    if (m_trainingInstances.equalHeaders(holdOut) == false) {
      throw new Exception("evaluateSubset : Incompatable instance types.");
    }

    AttributeFilter delTransform = new AttributeFilter();
    delTransform.setInvertSelection(true);
    // copy the training instances
    trainCopy = new Instances(m_trainingInstances);

    testCopy = new Instances(holdOut);

    // count attributes set in the BitSet
    for (i = 0; i < m_numAttribs; i++) {
      if (subset.get(i)) {
        numAttributes++;
      }
    }

    // set up an array of attribute indexes for the filter (+1 for the class)
    int[] featArray = new int[numAttributes + 1];

    for (i = 0, j = 0; i < m_numAttribs; i++) {
      if (subset.get(i)) {
        featArray[j++] = i;
      }
    }

    featArray[j] = m_classIndex;
    delTransform.setAttributeIndicesArray(featArray);
    delTransform.setInputFormat(trainCopy);
    trainCopy = Filter.useFilter(trainCopy, delTransform);
    testCopy = Filter.useFilter(testCopy, delTransform);

    // build the classifier
    m_Classifier.buildClassifier(trainCopy);

    m_Evaluation = new Evaluation(trainCopy);
    m_Evaluation.evaluateModel(m_Classifier, testCopy);

    if (m_trainingInstances.classAttribute().isNominal()) {
      errorRate = m_Evaluation.errorRate();
    } else {
      errorRate = m_Evaluation.meanAbsoluteError();
    }

    // return the negative of the error as search methods need to
    // maximize something
    return -errorRate;
  }
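
  /* Illustrative sketch (not part of the original source): this overload lets
   * a caller supply the hold-out set directly instead of configuring it via
   * setHoldOutFile()/-H, e.g. scoring the same candidate subset against
   * several externally prepared test sets:
   *
   *   double meritOnFold1 = eval.evaluateSubset(candidate, fold1);
   *   double meritOnFold2 = eval.evaluateSubset(candidate, fold2);
   *
   * fold1 and fold2 are hypothetical Instances with the same header as the
   * training data (otherwise the method throws).
   */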

  /**
   * Evaluates a subset of attributes with respect to a single instance.
   * Calling this function overrides any hold out/test instances set
   * through setHoldOutFile.
   * @param subset a bitset representing the attribute subset to be
   * evaluated
   * @param holdOut a single instance (possibly not one of those used to
   * build/train the evaluator) with which to evaluate the merit of the subset
   * @param retrain true if the classifier should be retrained with respect
   * to the new subset before testing on the holdOut instance.
   * @return the "merit" of the subset on the holdOut instance
   * @exception Exception if the subset cannot be evaluated
   */
  public double evaluateSubset(BitSet subset, Instance holdOut,
			       boolean retrain)
    throws Exception {
    int i,j;
    double error;
    int numAttributes = 0;
    Instances trainCopy=null;
    Instance testCopy=null;

    if (m_trainingInstances.equalHeaders(holdOut.dataset()) == false) {
      throw new Exception("evaluateSubset : Incompatable instance types.");
    }

    AttributeFilter delTransform = new AttributeFilter();
    delTransform.setInvertSelection(true);
    // copy the training instances
    trainCopy = new Instances(m_trainingInstances);

    testCopy = new Instance(holdOut);

    // count attributes set in the BitSet
    for (i = 0; i < m_numAttribs; i++) {
      if (subset.get(i)) {
        numAttributes++;
      }
    }

    // set up an array of attribute indexes for the filter (+1 for the class)
    int[] featArray = new int[numAttributes + 1];

    for (i = 0, j = 0; i < m_numAttribs; i++) {
      if (subset.get(i)) {
        featArray[j++] = i;
      }
    }
    featArray[j] = m_classIndex;
    delTransform.setAttributeIndicesArray(featArray);
    delTransform.setInputFormat(trainCopy);

    if (retrain) {
      trainCopy = Filter.useFilter(trainCopy, delTransform);
      // build the classifier
      m_Classifier.buildClassifier(trainCopy);
    }

    delTransform.input(testCopy);
    testCopy = delTransform.output();

    double pred;
    if (m_Classifier instanceof DistributionClassifier) {
      double [] distrib;
      distrib = ((DistributionClassifier)m_Classifier).
	distributionForInstance(testCopy);
      if (m_trainingInstances.classAttribute().isNominal()) {
	pred = distrib[(int)testCopy.classValue()];
      } else {
	pred = distrib[0];
      }
    } else {
      pred = m_Classifier.classifyInstance(testCopy);
      if (m_trainingInstances.classAttribute().isNominal()) {
	pred = (pred == testCopy.classValue()) ? 1.0 : 0.0;
      }

    }

    if (m_trainingInstances.classAttribute().isNominal()) {
      error = 1.0 - pred;
    } else {
      error = testCopy.classValue() - pred;
    }

    // return the negative of the error as search methods need to
    // maximize something
    return -error;
  }
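
  /* Illustrative sketch (not part of the original source): the single-instance
   * variant above suits incremental searches that score one hold-out instance
   * at a time and only rebuild the classifier when the candidate subset
   * changes, e.g. roughly:
   *
   *   boolean first = true;
   *   double merit = 0;
   *   for (int k = 0; k < holdOutSet.numInstances(); k++) {
   *     merit += eval.evaluateSubset(candidate, holdOutSet.instance(k), first);
   *     first = false;   // reuse the classifier built on the first call
   *   }
   *
   * holdOutSet, candidate and eval are hypothetical; whether the first call
   * should retrain depends on the caller.
   */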

  /**
   * Returns a string describing ClassifierSubsetEval
   *
   * @return the description as a string
   */
  public String toString() {
    StringBuffer text = new StringBuffer();

    if (m_trainingInstances == null) {
      text.append("\tClassifier subset evaluator has not been built yet\n");
    }
    else {
      text.append("\tClassifier Subset Evaluator\n");
      text.append("\tLearning scheme: "
		  + getClassifier().getClass().getName() + "\n");
      text.append("\tScheme options: ");
      String[] classifierOptions = new String[0];

      if (m_Classifier instanceof OptionHandler) {
        classifierOptions = ((OptionHandler)m_Classifier).getOptions();

        for (int i = 0; i < classifierOptions.length; i++) {
          text.append(classifierOptions[i] + " ");
        }
      }

      text.append("\n");
      text.append("\tHold out/test set: ");
      if (!m_useTraining) {
	if (getHoldOutFile().getPath().startsWith("Click to set")) {
	  text.append("none\n");
	} else {
	  text.append(getHoldOutFile().getPath()+'\n');
	}
      } else {
	text.append("Training data\n");
      }
      if (m_trainingInstances.attribute(m_classIndex).isNumeric()) {
	text.append("\tAccuracy estimation: MAE\n");
      } else {
	text.append("\tAccuracy estimation: classification error\n");
      }
    }
    return text.toString();
  }

  /**
   * reset to defaults
   */
  protected void resetOptions () {
    m_trainingInstances = null;
    m_Evaluation = null;
    m_Classifier = new ZeroR();
    m_holdOutFile = new File("Click to set hold out or test instances");
    m_holdOutInstances = null;
    m_useTraining = false;
  }

  /**
   * Main method for testing this class.
   *
   * @param args the options
   */
  public static void main (String[] args) {
    try {
      System.out.println(AttributeSelection.
			 SelectAttributes(new ClassifierSubsetEval(), args));
    }
    catch (Exception e) {
      // print the stack trace and the exception message to standard error
      e.printStackTrace();
      System.err.println(e.getMessage());
    }
  }
}
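
The evaluator is normally driven from the command line through main() above, but it can also be wired up programmatically. The sketch below is not part of the original file: it assumes the standard Weka 3.x weka.attributeSelection classes (AttributeSelection, BestFirst) plus a setUseTraining(boolean) setter for the m_useTraining field seen above, and the demo class name and ARFF argument are hypothetical.

import java.io.BufferedReader;
import java.io.FileReader;
import weka.attributeSelection.AttributeSelection;
import weka.attributeSelection.BestFirst;
import weka.attributeSelection.ClassifierSubsetEval;
import weka.core.Instances;

public class SubsetEvalDemo {
  public static void main(String[] args) throws Exception {
    // load the training data; assume the last attribute is the class
    Instances data = new Instances(new BufferedReader(new FileReader(args[0])));
    data.setClassIndex(data.numAttributes() - 1);

    ClassifierSubsetEval eval = new ClassifierSubsetEval();
    // ZeroR is the default scheme per resetOptions(); a real run would
    // usually install the wrapped classifier via eval.setClassifier(...)
    eval.setUseTraining(true);  // assumed setter: score subsets on the training data

    AttributeSelection selector = new AttributeSelection();
    selector.setEvaluator(eval);
    selector.setSearch(new BestFirst());
    selector.SelectAttributes(data);
    System.out.println(selector.toResultsString());
  }
}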
