// Evaluation.java
text.append("Mean absolute error ");
text.append(Utils.doubleToString(meanAbsoluteError(), 12, 4)
+ "\n");
text.append("Root mean squared error ");
text.append(Utils.
doubleToString(rootMeanSquaredError(), 12, 4)
+ "\n");
if (!m_NoPriors) {
text.append("Relative absolute error ");
text.append(Utils.doubleToString(relativeAbsoluteError(),
12, 4) + " %\n");
text.append("Root relative squared error ");
text.append(Utils.doubleToString(rootRelativeSquaredError(),
12, 4) + " %\n");
}
}
if (Utils.gr(unclassified(), 0)) {
text.append("UnClassified Instances ");
text.append(Utils.doubleToString(unclassified(), 12,4) + " " +
Utils.doubleToString(pctUnclassified(),
12, 4) + " %\n");
}
text.append("Total Number of Instances ");
text.append(Utils.doubleToString(m_WithClass, 12, 4) + "\n");
if (m_MissingClass > 0) {
text.append("Ignored Class Unknown Instances ");
text.append(Utils.doubleToString(m_MissingClass, 12, 4) + "\n");
}
} catch (Exception ex) {
// Should never occur since the class is known to be nominal
// here
System.err.println("Arggh - Must be a bug in Evaluation class");
}
return text.toString();
}
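/*
 * Usage sketch (not part of this class): a minimal example of how an
 * Evaluation object is typically built and its summary printed, assuming the
 * standard Weka API (weka.core.converters.ConverterUtils.DataSource,
 * weka.classifiers.trees.J48); the data file name is a placeholder and the
 * surrounding calls declare "throws Exception".
 *
 *   Instances data = DataSource.read("somefile.arff");   // placeholder file
 *   data.setClassIndex(data.numAttributes() - 1);        // last attribute = class
 *   Evaluation eval = new Evaluation(data);
 *   eval.crossValidateModel(new J48(), data, 10, new java.util.Random(1));
 *   System.out.println(eval.toSummaryString());          // the summary built above
 *   System.out.println(eval.toMatrixString());           // see toMatrixString() below
 *   System.out.println(eval.toClassDetailsString());     // see toClassDetailsString() below
 */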
/**
* Calls toMatrixString() with a default title.
*
* @return the confusion matrix as a string
* @throws Exception if the class is numeric
*/
public String toMatrixString() throws Exception {
return toMatrixString("=== Confusion Matrix ===\n");
}
/**
* Outputs the performance statistics as a classification confusion
* matrix. For each class value, shows the distribution of
* predicted class values.
*
* @param title the title for the confusion matrix
* @return the confusion matrix as a String
* @throws Exception if the class is numeric
*/
public String toMatrixString(String title) throws Exception {
StringBuffer text = new StringBuffer();
char [] IDChars = {'a','b','c','d','e','f','g','h','i','j',
'k','l','m','n','o','p','q','r','s','t',
'u','v','w','x','y','z'};
int IDWidth;
boolean fractional = false;
if (!m_ClassIsNominal) {
throw new Exception("Evaluation: No confusion matrix possible!");
}
// Find the maximum value in the matrix
// and check for fractional display requirement
double maxval = 0;
for(int i = 0; i < m_NumClasses; i++) {
for(int j = 0; j < m_NumClasses; j++) {
double current = m_ConfusionMatrix[i][j];
if (current < 0) {
current *= -10;
}
if (current > maxval) {
maxval = current;
}
double fract = current - Math.rint(current);
if (!fractional
&& ((Math.log(fract) / Math.log(10)) >= -2)) {
fractional = true;
}
}
}
IDWidth = 1 + Math.max((int)(Math.log(maxval) / Math.log(10)
+ (fractional ? 3 : 0)),
(int)(Math.log(m_NumClasses) /
Math.log(IDChars.length)));
text.append(title).append("\n");
for(int i = 0; i < m_NumClasses; i++) {
if (fractional) {
text.append(" ").append(num2ShortID(i,IDChars,IDWidth - 3))
.append(" ");
} else {
text.append(" ").append(num2ShortID(i,IDChars,IDWidth));
}
}
text.append(" <-- classified as\n");
for(int i = 0; i< m_NumClasses; i++) {
for(int j = 0; j < m_NumClasses; j++) {
text.append(" ").append(
Utils.doubleToString(m_ConfusionMatrix[i][j],
IDWidth,
(fractional ? 2 : 0)));
}
text.append(" | ").append(num2ShortID(i,IDChars,IDWidth))
.append(" = ").append(m_ClassNames[i]).append("\n");
}
return text.toString();
}
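/*
 * Example of the output format produced above, for a hypothetical three-class
 * problem (class names invented, spacing approximate; rows are actual classes,
 * columns are predicted classes):
 *
 *   === Confusion Matrix ===
 *
 *     a  b  c   <-- classified as
 *    50  3  2 |  a = classA
 *     6 40  4 |  b = classB
 *     4  5 36 |  c = classC
 *
 * This hypothetical matrix is reused by the worked examples below.
 */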
/**
* Generates a breakdown of the accuracy for each class (with default title),
* incorporating various information-retrieval statistics, such as
* true/false positive rate, precision/recall/F-Measure. Should be
* useful for ROC curves, recall/precision curves.
*
* @return the statistics presented as a string
* @throws Exception if class is not nominal
*/
public String toClassDetailsString() throws Exception {
return toClassDetailsString("=== Detailed Accuracy By Class ===\n");
}
/**
* Generates a breakdown of the accuracy for each class,
* incorporating various information-retrieval statistics, such as
* true/false positive rate, precision/recall/F-Measure. Should be
* useful for ROC curves, recall/precision curves.
*
* @param title the title to prepend the stats string with
* @return the statistics presented as a string
* @throws Exception if class is not nominal
*/
public String toClassDetailsString(String title) throws Exception {
if (!m_ClassIsNominal) {
throw new Exception("Evaluation: No confusion matrix possible!");
}
StringBuffer text = new StringBuffer(title
+ "\nTP Rate FP Rate"
+ " Precision Recall"
+ " F-Measure Class\n");
for(int i = 0; i < m_NumClasses; i++) {
text.append(Utils.doubleToString(truePositiveRate(i), 7, 3))
.append(" ");
text.append(Utils.doubleToString(falsePositiveRate(i), 7, 3))
.append(" ");
text.append(Utils.doubleToString(precision(i), 7, 3))
.append(" ");
text.append(Utils.doubleToString(recall(i), 7, 3))
.append(" ");
text.append(Utils.doubleToString(fMeasure(i), 7, 3))
.append(" ");
text.append(m_ClassNames[i]).append('\n');
}
return text.toString();
}
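/*
 * Worked example for class "a" of the hypothetical matrix shown after
 * toMatrixString() above (one line of the detailed-accuracy output):
 *
 *   TP Rate   = 50 / (50 + 3 + 2)                    ~ 0.909
 *   FP Rate   = (6 + 4) / (50 + 45)                  ~ 0.105   (95 = instances whose actual class is not "a")
 *   Precision = 50 / (50 + 6 + 4)                    ~ 0.833
 *   Recall    = TP Rate                              ~ 0.909
 *   F-Measure = 2 * 0.833 * 0.909 / (0.833 + 0.909)  ~ 0.870
 */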
/**
* Calculate the number of true positives with respect to a particular class.
* This is defined as<p/>
* <pre>
* correctly classified positives
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the number of true positives
*/
public double numTruePositives(int classIndex) {
double correct = 0;
for (int j = 0; j < m_NumClasses; j++) {
if (j == classIndex) {
correct += m_ConfusionMatrix[classIndex][j];
}
}
return correct;
}
/**
* Calculate the true positive rate with respect to a particular class.
* This is defined as<p/>
* <pre>
* correctly classified positives
* ------------------------------
* total positives
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the true positive rate
*/
public double truePositiveRate(int classIndex) {
double correct = 0, total = 0;
for (int j = 0; j < m_NumClasses; j++) {
if (j == classIndex) {
correct += m_ConfusionMatrix[classIndex][j];
}
total += m_ConfusionMatrix[classIndex][j];
}
if (total == 0) {
return 0;
}
return correct / total;
}
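/*
 * Worked example, using the hypothetical matrix shown after toMatrixString():
 * for classIndex 0 ("a"), numTruePositives reads the diagonal cell of row "a"
 * and truePositiveRate divides it by the whole row:
 *
 *   numTruePositives(0) = 50
 *   truePositiveRate(0) = 50 / (50 + 3 + 2) ~ 0.909
 */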
/**
* Calculate the number of true negatives with respect to a particular class.
* This is defined as<p/>
* <pre>
* correctly classified negatives
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the number of true negatives
*/
public double numTrueNegatives(int classIndex) {
double correct = 0;
for (int i = 0; i < m_NumClasses; i++) {
if (i != classIndex) {
for (int j = 0; j < m_NumClasses; j++) {
if (j != classIndex) {
correct += m_ConfusionMatrix[i][j];
}
}
}
}
return correct;
}
/**
* Calculate the true negative rate with respect to a particular class.
* This is defined as<p/>
* <pre>
* correctly classified negatives
* ------------------------------
* total negatives
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the true negative rate
*/
public double trueNegativeRate(int classIndex) {
double correct = 0, total = 0;
for (int i = 0; i < m_NumClasses; i++) {
if (i != classIndex) {
for (int j = 0; j < m_NumClasses; j++) {
if (j != classIndex) {
correct += m_ConfusionMatrix[i][j];
}
total += m_ConfusionMatrix[i][j];
}
}
}
if (total == 0) {
return 0;
}
return correct / total;
}
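/*
 * Continuing the worked example: the true negatives for class "a" are all
 * cells lying outside both row "a" and column "a":
 *
 *   numTrueNegatives(0) = 40 + 4 + 5 + 36 = 85
 *   trueNegativeRate(0) = 85 / (50 + 45) ~ 0.895
 */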
/**
* Calculate number of false positives with respect to a particular class.
* This is defined as<p/>
* <pre>
* incorrectly classified negatives
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the number of false positives
*/
public double numFalsePositives(int classIndex) {
double incorrect = 0;
for (int i = 0; i < m_NumClasses; i++) {
if (i != classIndex) {
for (int j = 0; j < m_NumClasses; j++) {
if (j == classIndex) {
incorrect += m_ConfusionMatrix[i][j];
}
}
}
}
return incorrect;
}
/**
* Calculate the false positive rate with respect to a particular class.
* This is defined as<p/>
* <pre>
* incorrectly classified negatives
* --------------------------------
* total negatives
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the false positive rate
*/
public double falsePositiveRate(int classIndex) {
double incorrect = 0, total = 0;
for (int i = 0; i < m_NumClasses; i++) {
if (i != classIndex) {
for (int j = 0; j < m_NumClasses; j++) {
if (j == classIndex) {
incorrect += m_ConfusionMatrix[i][j];
}
total += m_ConfusionMatrix[i][j];
}
}
}
if (total == 0) {
return 0;
}
return incorrect / total;
}
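/*
 * Continuing the worked example: the false positives for class "a" are the
 * cells of column "a" taken from the other rows:
 *
 *   numFalsePositives(0) = 6 + 4 = 10
 *   falsePositiveRate(0) = 10 / (50 + 45) ~ 0.105
 */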
/**
* Calculate number of false negatives with respect to a particular class.
* This is defined as<p/>
* <pre>
* incorrectly classified positives
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the number of false negatives
*/
public double numFalseNegatives(int classIndex) {
double incorrect = 0;
for (int i = 0; i < m_NumClasses; i++) {
if (i == classIndex) {
for (int j = 0; j < m_NumClasses; j++) {
if (j != classIndex) {
incorrect += m_ConfusionMatrix[i][j];
}
}
}
}
return incorrect;
}
/**
* Calculate the false negative rate with respect to a particular class.
* This is defined as<p/>
* <pre>
* incorrectly classified positives
* --------------------------------
* total positives
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the false negative rate
*/
public double falseNegativeRate(int classIndex) {
double incorrect = 0, total = 0;
for (int i = 0; i < m_NumClasses; i++) {
if (i == classIndex) {
for (int j = 0; j < m_NumClasses; j++) {
if (j != classIndex) {
incorrect += m_ConfusionMatrix[i][j];
}
total += m_ConfusionMatrix[i][j];
}
}
}
if (total == 0) {
return 0;
}
return incorrect / total;
}
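/*
 * Continuing the worked example: the false negatives for class "a" are the
 * off-diagonal cells of row "a":
 *
 *   numFalseNegatives(0) = 3 + 2 = 5
 *   falseNegativeRate(0) = 5 / (50 + 3 + 2) ~ 0.091
 */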
/**
* Calculate the recall with respect to a particular class.
* This is defined as<p/>
* <pre>
* correctly classified positives
* ------------------------------
* total positives
* </pre><p/>
* (Which is also the same as the truePositiveRate.)
*
* @param classIndex the index of the class to consider as "positive"
* @return the recall
*/
public double recall(int classIndex) {
return truePositiveRate(classIndex);
}
/**
* Calculate the precision with respect to a particular class.
* This is defined as<p/>
* <pre>
* correctly classified positives
* ------------------------------
* total predicted as positive
* </pre>
*
* @param classIndex the index of the class to consider as "positive"
* @return the precision
*/
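// A minimal sketch, assuming precision(int) mirrors the per-class counters
// above: the diagonal cell for classIndex divided by the column total, i.e.
// by everything predicted as that class (the original implementation may
// differ in detail).
public double precision(int classIndex) {
double correct = 0, total = 0;
for (int i = 0; i < m_NumClasses; i++) {
if (i == classIndex) {
correct += m_ConfusionMatrix[i][classIndex];
}
total += m_ConfusionMatrix[i][classIndex];
}
if (total == 0) {
return 0;
}
return correct / total;
}
/*
 * Continuing the worked example: precision(0) for class "a" of the
 * hypothetical matrix is 50 / (50 + 6 + 4) ~ 0.833.
 */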