PerformanceEvaluator.java
        // named performance criteria
        performanceCriteria = new PerformanceVector();
        for (int i = 0; i < CRITERIA_NAMES.length; i++) {
            if (getParameterAsBoolean(CRITERIA_NAMES[i])) {
                try {
                    performanceCriteria.addCriterion((PerformanceCriterion) CRITERIA_CLASSES[i].newInstance());
                } catch (InstantiationException e) {
                    LogService.logMessage("Cannot instantiate " + CRITERIA_CLASSES[i] + ".", LogService.ERROR);
                } catch (IllegalAccessException e) {
                    LogService.logMessage("Cannot instantiate " + CRITERIA_CLASSES[i] + ".", LogService.ERROR);
                }
            }
        }

        // multi class classification criteria
        for (int i = 0; i < MultiClassificationPerformance.NAME.length; i++) {
            if (getParameterAsBoolean(MultiClassificationPerformance.NAME[i])) {
                performanceCriteria.addCriterion(new MultiClassificationPerformance(i));
            }
        }

        // other (universal) classification criteria
        for (int i = 0; i < BinaryClassificationPerformance.NAME.length; i++) {
            if (getParameterAsBoolean(BinaryClassificationPerformance.NAME[i])) {
                performanceCriteria.addCriterion(new BinaryClassificationPerformance(i));
            }
        }

        // user defined performance criteria
        Iterator i = getParameterList("additional_performance_criteria").iterator();
        while (i.hasNext()) {
            Object[] keyValue = (Object[]) i.next();
            String className = (String) keyValue[0];
            String parameter = (String) keyValue[1];
            Class criterionClass = null;
            try {
                criterionClass = edu.udo.cs.yale.tools.Tools.classForName(className);
                if (PerformanceCriterion.class.isAssignableFrom(criterionClass)) {
                    if ((parameter != null) && (parameter.trim().length() > 0)) {
                        java.lang.reflect.Constructor constructor = criterionClass.getConstructor(new Class[] { String.class });
                        performanceCriteria.addCriterion((PerformanceCriterion) constructor.newInstance(new Object[] { parameter }));
                    } else {
                        performanceCriteria.addCriterion((PerformanceCriterion) criterionClass.newInstance());
                    }
                } else {
                    throw new UserError(this, 914, new Object[] { criterionClass, PerformanceCriterion.class });
                }
            } catch (ClassNotFoundException e) {
                throw new UserError(this, e, 904, new Object[] { criterionClass, e });
            } catch (InstantiationException e) {
                throw new UserError(this, e, 904, new Object[] { criterionClass, e });
            } catch (IllegalAccessException e) {
                throw new UserError(this, e, 904, new Object[] { criterionClass, e });
            } catch (NoSuchMethodException e) {
                throw new UserError(this, e, 904, new Object[] { criterionClass, e });
            } catch (java.lang.reflect.InvocationTargetException e) {
                throw new UserError(this, e, 904, new Object[] { criterionClass, e });
            }
        }

        //performanceCriteria.setMainCriterionName(allCriteriaNames[mainCriterionIndex]);
        performanceCriteria.setMainCriterionName(getParameterAsString("main_criterion"));

        String comparatorClass = getParameterAsString("comparator_class");
        if (comparatorClass == null) {
            performanceCriteria.setComparator(new PerformanceVector.DefaultComparator(getParameterAsDouble("mdl_weight")));
        } else {
            try {
                Class pcClass = edu.udo.cs.yale.tools.Tools.classForName(comparatorClass);
                if (!PerformanceComparator.class.isAssignableFrom(pcClass)) {
                    throw new UserError(this, 914, new Object[] { pcClass, PerformanceComparator.class });
                } else {
                    performanceCriteria.setComparator((PerformanceComparator) pcClass.newInstance());
                }
            } catch (Throwable e) {
                throw new UserError(this, e, 904, new Object[] { comparatorClass, e });
            }
        }

        if (performanceCriteria.size() == 0) {
            throw new UserError(this, 910);
        }
    }
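
    /** Applies the operator: reads the test {@link ExampleSet} from the input,
     *  evaluates it with the selected performance criteria, and returns the
     *  resulting {@link PerformanceVector}. */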
    public IOObject[] apply() throws OperatorException {
        initialisePerformanceVector();
        // iterate over all examples and sum up deviations
        ExampleSet testSet = (ExampleSet) getInput(ExampleSet.class);
        evaluate(testSet, performanceCriteria, getParameterAsBoolean("skip_undefined_labels"));
        return new IOObject[] { performanceCriteria };
    }
    /** Evaluates the given test set. All {@link PerformanceCriterion} instances in
     *  the given {@link PerformanceVector} must be subclasses of {@link MeasuredPerformance}. */
    private void evaluate(ExampleSet testSet, PerformanceVector performanceCriteria, boolean skipUndefinedLabels)
            throws OperatorException {
        evaluate(this, testSet, performanceCriteria, skipUndefinedLabels);
    }
    /** Static version of {@link #evaluate(ExampleSet, PerformanceVector, boolean)}. This method
     *  was introduced to enable testing of the evaluation logic.
     *  @param evaluator Usually this. May be null for testing; it is only needed for exceptions. */
    public static void evaluate(PerformanceEvaluator evaluator,
                                ExampleSet testSet,
                                PerformanceVector performanceCriteria,
                                boolean skipUndefinedLabels) throws OperatorException {
        if (testSet.getLabel() == null)
            throw new UserError(evaluator, 105, new Object[0]);
        if (testSet.getPredictedLabel() == null)
            throw new UserError(evaluator, 107, new Object[0]);

        // initialise all criteria
        for (int pc = 0; pc < performanceCriteria.size(); pc++) {
            PerformanceCriterion c = performanceCriteria.getCriterion(pc);
            if (!(c instanceof MeasuredPerformance)) {
                throw new UserError(evaluator, 903, new Object[0]);
            }
            ((MeasuredPerformance) c).startCounting(testSet);
        }

        ExampleReader exampleIterator = testSet.getExampleReader();
        while (exampleIterator.hasNext()) {
            Example example = exampleIterator.next();
            if (skipUndefinedLabels &&
                (Double.isNaN(example.getLabel()) || Double.isNaN(example.getPredictedLabel()))) {
                continue;
            }
            for (int pc = 0; pc < performanceCriteria.size(); pc++) {
                ((MeasuredPerformance) performanceCriteria.getCriterion(pc)).countExample(example);
            }
        }
    }
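
    // INPUT_CLASSES and OUTPUT_CLASSES are constants defined elsewhere in this class
    // (not part of this fragment); apply() above suggests they describe an ExampleSet
    // input and a PerformanceVector output.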
    public Class[] getInputClasses() {
        return INPUT_CLASSES;
    }

    public Class[] getOutputClasses() {
        return OUTPUT_CLASSES;
    }
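
    /** Builds the parameter list: one boolean switch per available criterion, the
     *  main_criterion selection, the mdl_weight, the skip_undefined_labels flag, the
     *  optional comparator_class, and the additional_performance_criteria list. */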
    public List getParameterTypes() {
        List types = super.getParameterTypes();
        for (int i = 0; i < CRITERIA_NAMES.length; i++) {
            ParameterType type = new ParameterTypeBoolean(CRITERIA_NAMES[i], allCriteriaDescriptions[i], false);
            type.setExpert(false);
            types.add(type);
        }
        for (int i = 0; i < MultiClassificationPerformance.NAME.length; i++) {
            ParameterType type = new ParameterTypeBoolean(MultiClassificationPerformance.NAME[i],
                    MultiClassificationPerformance.DESCRIPTION[i], false);
            type.setExpert(false);
            types.add(type);
        }
        for (int i = 0; i < BinaryClassificationPerformance.NAME.length; i++) {
            ParameterType type = new ParameterTypeBoolean(BinaryClassificationPerformance.NAME[i],
                    BinaryClassificationPerformance.DESCRIPTION[i], false);
            type.setExpert(false);
            types.add(type);
        }
        ParameterType type = new ParameterTypeStringCategory("main_criterion",
                "The criterion used for comparing performance vectors.",
                allCriteriaNames, allCriteriaNames[0]);
        type.setExpert(false);
        types.add(type);
        types.add(new ParameterTypeDouble("mdl_weight",
                "The weight for the mdl criterion if used for performance comparisons.",
                0.0d, Double.POSITIVE_INFINITY, 0.0d));
        types.add(new ParameterTypeBoolean("skip_undefined_labels",
                "If set to true, examples with undefined labels are skipped.", false));
        types.add(new ParameterTypeString("comparator_class",
                "Fully qualified classname of the PerformanceComparator implementation.", true));
        types.add(new ParameterTypeList("additional_performance_criteria",
                "List of classes that implement edu.udo.cs.yale.operator.performance.PerformanceCriterion.",
                new ParameterTypeString("additional_performance_criteria",
                        "The key must be a fully qualified classname and the value must be a string that is passed to the constructor of this class.",
                        false)));
        return types;
    }
}
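
The static evaluate(...) above is documented as usable with a null evaluator for testing. The snippet below is a minimal sketch of such a direct call; it assumes an ExampleSet with label and predicted label is already available (its construction depends on the surrounding edu.udo.cs.yale example API and is not shown), imports are omitted, and the class name PerformanceEvaluatorSketch as well as the choice of MultiClassificationPerformance index 0 are illustrative only.

// Hypothetical helper, not part of the original source.
public class PerformanceEvaluatorSketch {

    /** Evaluates a labelled and predicted ExampleSet with a single criterion
     *  and returns the filled PerformanceVector. */
    public static PerformanceVector evaluateDirectly(ExampleSet testSet) throws OperatorException {
        PerformanceVector vector = new PerformanceVector();
        // index 0 picks one of the multi-class measures; any MeasuredPerformance subclass would do
        vector.addCriterion(new MultiClassificationPerformance(0));
        // the evaluator argument may be null when the static version is called directly (see javadoc above)
        PerformanceEvaluator.evaluate(null, testSet, vector, false);
        return vector;
    }
}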