⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 checkclassifier.java

📁 一个数据挖掘软件ALPHAMINERR的整个过程的JAVA版源代码
💻 JAVA
📖 第 1 页 / 共 4 页
字号:
   * test (likelihood depends on the classifier).
   *
   * @param nominalPredictor if true use nominal predictor attributes
   * @param numericPredictor if true use numeric predictor attributes
   * @param numericClass if true use a numeric class attribute otherwise a
   * nominal class attribute
   * @return true if the test was passed
   */
  protected boolean updatingEquality(boolean nominalPredictor,
                                     boolean numericPredictor,
                                     boolean numericClass) {

    System.out.print("incremental training produces the same results"
                     + " as batch training");
    printAttributeSummary(nominalPredictor, numericPredictor, numericClass);
    System.out.print("...");
    int numTrain = 20, numTest = 20, numClasses = 2, missingLevel = 0;
    boolean predictorMissing = false, classMissing = false;

    Instances train = null;
    Instances test = null;
    Classifier [] classifiers = null;
    Evaluation evaluationB = null;
    Evaluation evaluationI = null;
    boolean built = false;
    try {
      // Identical dataset structure for train and test; only the random
      // seeds (42 vs 24) differ.
      train = makeTestDataset(42, numTrain,
                              nominalPredictor ? 2 : 0,
                              numericPredictor ? 1 : 0,
                              numClasses,
                              numericClass);
      test = makeTestDataset(24, numTest,
                             nominalPredictor ? 2 : 0,
                             numericPredictor ? 1 : 0,
                             numClasses,
                             numericClass);
      if (nominalPredictor) {
        train.deleteAttributeAt(0);
        test.deleteAttributeAt(0);
      }
      if (missingLevel > 0) {
        addMissing(train, missingLevel, predictorMissing, classMissing);
        addMissing(test, Math.min(missingLevel, 50), predictorMissing,
                   classMissing);
      }
      // classifiers[0] is batch-trained, classifiers[1] is trained
      // incrementally below; both start from identical copies.
      classifiers = Classifier.makeCopies(getClassifier(), 2);
      evaluationB = new Evaluation(train);
      evaluationI = new Evaluation(train);
      classifiers[0].buildClassifier(train);
      testWRTZeroR(classifiers[0], evaluationB, train, test);
    } catch (Exception ex) {
      // Chain the original exception so its stack trace isn't lost.
      throw new Error("Error setting up for tests: " + ex.getMessage(), ex);
    }
    try {
      // Incremental path: build on an empty header, then feed instances
      // one at a time through the UpdateableClassifier interface.
      classifiers[1].buildClassifier(new Instances(train, 0));
      for (int i = 0; i < train.numInstances(); i++) {
        ((UpdateableClassifier)classifiers[1]).updateClassifier(
             train.instance(i));
      }
      built = true;
      testWRTZeroR(classifiers[1], evaluationI, train, test);
      if (!evaluationB.equals(evaluationI)) {
        System.out.println("no");
        if (m_Debug) {
          System.out.println("\n=== Full Report ===");
          System.out.println("Results differ between batch and "
                             + "incrementally built models.\n"
                             + "Depending on the classifier, this may be OK");
          System.out.println("Here are the results:\n");
          System.out.println(evaluationB.toSummaryString(
                             "\nbatch built results\n", true));
          System.out.println(evaluationI.toSummaryString(
                             "\nincrementally built results\n", true));
          System.out.println("Here are the datasets:\n");
          System.out.println("=== Train Dataset ===\n"
                             + train.toString() + "\n");
          System.out.println("=== Test Dataset ===\n"
                             + test.toString() + "\n\n");
        }
        return false;
      }
      System.out.println("yes");
      return true;
    } catch (Exception ex) {
      System.out.print("Problem during");
      // 'built' distinguishes a failure while updating from a failure
      // while evaluating the incrementally built model.
      if (built) {
        System.out.print(" testing");
      } else {
        System.out.print(" training");
      }
      System.out.println(": " + ex.getMessage() + "\n");
    }
    return false;
  }

  /**
   * Checks whether the classifier erroneously uses the class
   * value of test instances (if provided). Runs the classifier with
   * test instance class values set to missing and compares with results
   * when test instance class values are left intact.
   *
   * @param nominalPredictor if true use nominal predictor attributes
   * @param numericPredictor if true use numeric predictor attributes
   * @param numericClass if true use a numeric class attribute otherwise a
   * nominal class attribute
   * @return true if the test was passed
   */
  protected boolean doesntUseTestClassVal(boolean nominalPredictor,
                                          boolean numericPredictor,
                                          boolean numericClass) {

    System.out.print("classifier ignores test instance class vals");
    printAttributeSummary(nominalPredictor, numericPredictor, numericClass);
    System.out.print("...");
    int numTrain = 40, numTest = 20, numClasses = 2, missingLevel = 0;
    boolean predictorMissing = false, classMissing = false;

    Instances train = null;
    Instances test = null;
    Classifier [] classifiers = null;
    Evaluation evaluationB = null;
    Evaluation evaluationI = null;
    boolean evalFail = false;
    try {
      train = makeTestDataset(43, numTrain,
                              nominalPredictor ? 3 : 0,
                              numericPredictor ? 2 : 0,
                              numClasses,
                              numericClass);
      test = makeTestDataset(24, numTest,
                             nominalPredictor ? 3 : 0,
                             numericPredictor ? 2 : 0,
                             numClasses,
                             numericClass);
      if (nominalPredictor) {
        train.deleteAttributeAt(0);
        test.deleteAttributeAt(0);
      }
      if (missingLevel > 0) {
        addMissing(train, missingLevel, predictorMissing, classMissing);
        addMissing(test, Math.min(missingLevel, 50), predictorMissing,
                   classMissing);
      }
      // Two identical copies: one predicts with the class value intact,
      // the other with it set to missing.
      classifiers = Classifier.makeCopies(getClassifier(), 2);
      evaluationB = new Evaluation(train);
      evaluationI = new Evaluation(train);
      classifiers[0].buildClassifier(train);
      classifiers[1].buildClassifier(train);
    } catch (Exception ex) {
      // Chain the original exception so its stack trace isn't lost.
      throw new Error("Error setting up for tests: " + ex.getMessage(), ex);
    }
    try {

      // Now set test values to missing when predicting
      for (int i = 0; i < test.numInstances(); i++) {
        Instance testInst = test.instance(i);
        Instance classMissingInst = (Instance)testInst.copy();
        classMissingInst.setDataset(test);
        classMissingInst.setClassMissing();
        double [] dist0 = classifiers[0].distributionForInstance(testInst);
        double [] dist1 = classifiers[1].distributionForInstance(classMissingInst);
        for (int j = 0; j < dist0.length; j++) {
          if (dist0[j] != dist1[j]) {
            // Mark this as an evaluation mismatch (not a runtime problem)
            // so the debug report below prints the right explanation.
            evalFail = true;
            throw new Exception("Prediction different for instance "
                                + (i + 1));
          }
        }
      }
      System.out.println("yes");
      return true;
    } catch (Exception ex) {
      System.out.println("no");
      if (m_Debug) {
        System.out.println("\n=== Full Report ===");

        if (evalFail) {
          System.out.println("Results differ between non-missing and "
                             + "missing test class values.");
        } else {
          System.out.print("Problem during testing");
          System.out.println(": " + ex.getMessage() + "\n");
        }
        System.out.println("Here are the datasets:\n");
        System.out.println("=== Train Dataset ===\n"
                           + train.toString() + "\n");
        System.out.println("=== Train Weights ===\n");
        for (int i = 0; i < train.numInstances(); i++) {
          System.out.println(" " + (i + 1)
                             + "    " + train.instance(i).weight());
        }
        System.out.println("=== Test Dataset ===\n"
                           + test.toString() + "\n\n");
        System.out.println("(test weights all 1.0)\n");
      }
    }
    return false;
  }

  /**
   * Checks whether the classifier can handle instance weights.
   * This test compares the classifier performance on two datasets
   * that are identical except for the training weights. If the
   * results change, then the classifier must be using the weights. It
   * may be possible to get a false positive from this test if the
   * weight changes aren't significant enough to induce a change
   * in classifier performance (but the weights are chosen to minimize
   * the likelihood of this).
   *
   * @param nominalPredictor if true use nominal predictor attributes
   * @param numericPredictor if true use numeric predictor attributes
   * @param numericClass if true use a numeric class attribute otherwise a
   * nominal class attribute
   * @return true if the test was passed
   */
  protected boolean instanceWeights(boolean nominalPredictor,
                                    boolean numericPredictor,
                                    boolean numericClass) {

    System.out.print("classifier uses instance weights");
    printAttributeSummary(nominalPredictor, numericPredictor, numericClass);
    System.out.print("...");
    int numTrain = 40, numTest = 20, numClasses = 2, missingLevel = 0;
    boolean predictorMissing = false, classMissing = false;

    Instances train = null;
    Instances test = null;
    Classifier [] classifiers = null;
    Evaluation evaluationB = null;
    Evaluation evaluationI = null;
    boolean built = false;
    boolean evalFail = false;
    try {
      train = makeTestDataset(43, numTrain,
                              nominalPredictor ? 3 : 0,
                              numericPredictor ? 2 : 0,
                              numClasses,
                              numericClass);
      test = makeTestDataset(24, numTest,
                             nominalPredictor ? 3 : 0,
                             numericPredictor ? 2 : 0,
                             numClasses,
                             numericClass);
      if (nominalPredictor) {
        train.deleteAttributeAt(0);
        test.deleteAttributeAt(0);
      }
      if (missingLevel > 0) {
        addMissing(train, missingLevel, predictorMissing, classMissing);
        addMissing(test, Math.min(missingLevel, 50), predictorMissing,
                   classMissing);
      }
      // classifiers[0] is trained on uniform weights (the baseline);
      // classifiers[1] is retrained after reweighting below.
      classifiers = Classifier.makeCopies(getClassifier(), 2);
      evaluationB = new Evaluation(train);
      evaluationI = new Evaluation(train);
      classifiers[0].buildClassifier(train);
      testWRTZeroR(classifiers[0], evaluationB, train, test);
    } catch (Exception ex) {
      // Chain the original exception so its stack trace isn't lost.
      throw new Error("Error setting up for tests: " + ex.getMessage(), ex);
    }
    try {

      // Now modify instance weights and re-built/test
      for (int i = 0; i < train.numInstances(); i++) {
        train.instance(i).setWeight(0);
      }
      // Give roughly half the instances a random weight in [1, 10];
      // the rest stay at 0 so the two runs should differ measurably.
      Random random = new Random(1);
      for (int i = 0; i < train.numInstances() / 2; i++) {
        int inst = Math.abs(random.nextInt()) % train.numInstances();
        int weight = Math.abs(random.nextInt()) % 10 + 1;
        train.instance(inst).setWeight(weight);
      }
      classifiers[1].buildClassifier(train);
      built = true;
      testWRTZeroR(classifiers[1], evaluationI, train, test);
      if (evaluationB.equals(evaluationI)) {
        // Identical results means the weights were ignored; flag it as an
        // evaluation failure rather than a runtime problem.
        evalFail = true;
        throw new Exception("evalFail");
      }
      System.out.println("yes");
      return true;
    } catch (Exception ex) {
      System.out.println("no");
      if (m_Debug) {
        System.out.println("\n=== Full Report ===");

        if (evalFail) {
          System.out.println("Results don't differ between non-weighted and "
                             + "weighted instance models.");
          System.out.println("Here are the results:\n");
          System.out.println(evaluationB.toSummaryString("\nboth methods\n",
                                                         true));
        } else {
          System.out.print("Problem during");
          if (built) {
            System.out.print(" testing");
          } else {
            System.out.print(" training");
          }
          System.out.println(": " + ex.getMessage() + "\n");
        }
        System.out.println("Here are the datasets:\n");
        System.out.println("=== Train Dataset ===\n"
                           + train.toString() + "\n");
        System.out.println("=== Train Weights ===\n");
        for (int i = 0; i < train.numInstances(); i++) {
          System.out.println(" " + (i + 1)
                             + "    " + train.instance(i).weight());
        }
        System.out.println("=== Test Dataset ===\n"
                           + test.toString() + "\n\n");
        System.out.println("(test weights all 1.0)\n");
      }
    }
    return false;
  }

  /**
   * Checks whether the scheme alters the training dataset during
   * training. If the scheme needs to modify the training
   * data it should take a copy of the training data. Currently checks
   * for changes to header structure, number of instances, order of
   * instances, instance weights.
   *
   * @param nominalPredictor if true use nominal predictor attributes
   * @param numericPredictor if true use numeric predictor attributes
   * @param numericClass if true use a numeric class attribute otherwise a
   * nominal class attribute
   * @param predictorMissing true if we know the classifier can handle
   * (at least) moderate missing predictor values
   * @param classMissing true if we know the classifier can handle
   * (at least) moderate missing class values
   * @return true if the test was passed
   */
  protected boolean datasetIntegrity(boolean nominalPredictor,
				     boolean numericPredictor, 
				     boolean numericClass,
				     boolean predictorMissing,
				     boolean classMissing) {

    System.out.print("classifier doesn't alter original datasets");
    printAttributeSummary(nominalPredictor, numericPredictor, numericClass);
    System.out.print("...");
    int numTrain = 20, numTest = 20, numClasses = 2, missingLevel = 20;

    Instances train = null;
    Instances test = null;
    Classifier classifier = null;
    Evaluation evaluation = null;
    boolean built = false;
    try {
      train = makeTestDataset(42, numTrain, 
			      nominalPredictor ? 2 : 0,
			      numericPredictor ? 1 : 0, 
			      numClasses, 
			      numericClass);
      test = makeTestDataset(24, numTest,
			     nominalPredictor ? 2 : 0,

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -