
📄 PerformanceEvaluator.java

📁 YALE, the well-known open-source machine learning environment
💻 Java
/*
 *  YALE - Yet Another Learning Environment
 *  Copyright (C) 2002, 2003
 *      Simon Fischer, Ralf Klinkenberg, Ingo Mierswa,
 *      Katharina Morik, Oliver Ritthoff
 *      Artificial Intelligence Unit
 *      Computer Science Department
 *      University of Dortmund
 *      44221 Dortmund, Germany
 *  email: yale@ls8.cs.uni-dortmund.de
 *  web:   http://yale.cs.uni-dortmund.de/
 *
 *  This program is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU General Public License as
 *  published by the Free Software Foundation; either version 2 of the
 *  License, or (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful, but
 *  WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 *  General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 *  USA.
 */
package edu.udo.cs.yale.operator.performance;

import edu.udo.cs.yale.operator.parameter.*;
import edu.udo.cs.yale.operator.OperatorException;
import edu.udo.cs.yale.operator.*;
import edu.udo.cs.yale.example.*;
import java.io.FileNotFoundException;
import edu.udo.cs.yale.tools.LogService;
import edu.udo.cs.yale.tools.ParameterService;
import java.util.*;

/** A performance evaluator is an operator that expects a test {@link ExampleSet} as
 *  input, whose elements have both true and predicted labels, and delivers as output a
 *  list of performance values according to a list of performance criteria that
 *  it calculates.
 *  <br/>
 *  All of the performance criteria can be switched on using boolean parameters. Their
 *  values can be queried by a {@link edu.udo.cs.yale.operator.ExperimentLogOperator} using
 *  the same names. The main criterion is used for comparisons and needs to be specified
 *  only for experiments where performance vectors are compared, e.g. feature selection
 *  experiments.
 *  <br/>
 *  Implementations other than the simple comparator, which only compares the main
 *  criterion, can be specified using the parameter <var>comparator_class</var>. This may
 *  for instance be useful if you want to compare performance vectors according to the
 *  weighted sum of the individual criteria. In order to implement your own comparator,
 *  simply subclass {@link PerformanceComparator}. Illustrative sketches of a custom
 *  comparator and of a custom criterion follow this listing.
 *
 *  @yale.xmlclass PerformanceEvaluator
 *  @author Simon
 *  @version $Id: PerformanceEvaluator.java,v 2.15 2003/09/10 13:02:06 fischer Exp $
 */
public class PerformanceEvaluator extends Operator {

    /** Names of the performance criteria which can be used in experiment configuration files. */
    private static final String[] CRITERIA_NAMES = {
        "absolute",
        "scaled",
        "squared",
        "relative",
        "prediction_average"
    };

    /** The criterion classes corresponding to the names above. */
    private static final Class[] CRITERIA_CLASSES = {
        edu.udo.cs.yale.operator.performance.AbsoluteError.class,
        edu.udo.cs.yale.operator.performance.ScaledError.class,
        edu.udo.cs.yale.operator.performance.SquaredError.class,
        edu.udo.cs.yale.operator.performance.RelativeError.class,
        edu.udo.cs.yale.operator.performance.PredictionAverage.class
    };

    /** Names of the simple criteria followed by the universal classification criteria. */
    private static String[] allCriteriaNames;

    static {
        allCriteriaNames = new String[CRITERIA_NAMES.length + UniversalClassificationCriterion.NAME.length];
        System.arraycopy(CRITERIA_NAMES, 0,
                         allCriteriaNames, 0,
                         CRITERIA_NAMES.length);
        System.arraycopy(UniversalClassificationCriterion.NAME, 0,
                         allCriteriaNames, CRITERIA_NAMES.length,
                         UniversalClassificationCriterion.NAME.length);
    }

    private static final Class[] INPUT_CLASSES = { ExampleSet.class };

    private static final Class[] OUTPUT_CLASSES = { PerformanceVector.class };

    /** Maps criteria names to classes. */
    private static Map classnameMap;

    static {
        classnameMap = new HashMap();
        for (int i = 0; i < CRITERIA_NAMES.length; i++)
            classnameMap.put(CRITERIA_NAMES[i], CRITERIA_CLASSES[i]);
    }

    private boolean skipUndefinedLabels = false;

    private PerformanceVector performanceCriteria;

    public PerformanceEvaluator() {
        for (int i = 0; i < CRITERIA_NAMES.length; i++) {
            addPerformanceValue(CRITERIA_NAMES[i]);
        }
        for (int i = 0; i < UniversalClassificationCriterion.NAME.length; i++) {
            addPerformanceValue(UniversalClassificationCriterion.NAME[i]);
        }
    }

    /** Makes the value of the named criterion queryable, e.g. by an ExperimentLogOperator. */
    private void addPerformanceValue(final String name) {
        addValue(new Value(name, "The performance criterion " + name) {
            public double getValue() {
                if (performanceCriteria == null) return Double.NaN;
                PerformanceCriterion c = performanceCriteria.get(name);
                if (c != null) {
                    return c.getValue();
                } else {
                    return Double.NaN;
                }
            }
        });
    }

    /** Creates the performance vector from the boolean criterion parameters and installs
     *  the main criterion and the (default or user-supplied) comparator. */
    private void initialisePerformanceVector() throws OperatorException {
        int mainCriterionIndex = getParameterAsInt("main_criterion");
        performanceCriteria = new PerformanceVector();
        for (int i = 0; i < CRITERIA_NAMES.length; i++) {
            if (getParameterAsBoolean(CRITERIA_NAMES[i])) {
                try {
                    performanceCriteria.addCriterion((PerformanceCriterion) CRITERIA_CLASSES[i].newInstance());
                } catch (InstantiationException e) {
                    LogService.logMessage("Cannot instantiate " + CRITERIA_CLASSES[i] + ".", LogService.ERROR);
                } catch (IllegalAccessException e) {
                    LogService.logMessage("Cannot instantiate " + CRITERIA_CLASSES[i] + ".", LogService.ERROR);
                }
            }
        }
        for (int i = 0; i < UniversalClassificationCriterion.NAME.length; i++) {
            if (getParameterAsBoolean(UniversalClassificationCriterion.NAME[i])) {
                performanceCriteria.addCriterion(new UniversalClassificationCriterion(i));
            }
        }
        performanceCriteria.setMainCriterionName(allCriteriaNames[mainCriterionIndex]);
        String comparatorClass = getParameterAsString("comparator_class");
        if (comparatorClass == null) {
            performanceCriteria.setComparator(new PerformanceVector.DefaultComparator());
        } else {
            try {
                Class pcClass = Class.forName(comparatorClass);
                if (!PerformanceComparator.class.isAssignableFrom(pcClass)) {
                    LogService.logMessage(getName() + ": '" + pcClass + "' is not a PerformanceComparator.", LogService.MINIMUM);
                } else {
                    performanceCriteria.setComparator((PerformanceComparator) pcClass.newInstance());
                }
            } catch (Throwable e) {
                throw new UserError(this, e, 904, new Object[] { comparatorClass, e.getMessage() });
            }
        }
    }

    public IOObject[] apply() throws OperatorException {
        initialisePerformanceVector();
        // iterate over all examples and sum up deviations
        ExampleSet testSet = (ExampleSet) getInput(ExampleSet.class);
        evaluate(testSet, performanceCriteria, skipUndefinedLabels);
        return new IOObject[] { performanceCriteria };
    }

    /** Evaluates the given test set. All {@link PerformanceCriterion} instances in
     *  the given {@link PerformanceVector} must be subclasses of {@link MeasuredPerformance}. */
    private void evaluate(ExampleSet testSet, PerformanceVector performanceCriteria, boolean skipUndefinedLabels)
            throws OperatorException {
        evaluate(this, testSet, performanceCriteria, skipUndefinedLabels);
    }

    /** Static version of {@link #evaluate(ExampleSet,PerformanceVector,boolean)}. This method was
     *  introduced to enable testing of the method.
     *  @param evaluator Usually this. May be null for testing. Only needed for exceptions. */
    public static void evaluate(PerformanceEvaluator evaluator,
                                ExampleSet testSet,
                                PerformanceVector performanceCriteria,
                                boolean skipUndefinedLabels)
            throws OperatorException {
        if (testSet.getLabel() == null) throw new UserError(evaluator, 105, new Object[0]);
        if (testSet.getPredictedLabel() == null) throw new UserError(evaluator, 107, new Object[0]);
        // initialise all criteria
        for (int pc = 0; pc < performanceCriteria.size(); pc++) {
            PerformanceCriterion c = performanceCriteria.get(pc);
            if (!(c instanceof MeasuredPerformance)) {
                throw new UserError(evaluator, 903, new Object[0]);
            }
            ((MeasuredPerformance) c).startCounting(testSet);
        }
        ExampleReader exampleIterator = testSet.getExampleReader();
        while (exampleIterator.hasNext()) {
            Example example = exampleIterator.next();

            if (skipUndefinedLabels &&
                (Double.isNaN(example.getLabel()) ||
                 Double.isNaN(example.getPredictedLabel()))) continue;

            for (int pc = 0; pc < performanceCriteria.size(); pc++) {
                ((MeasuredPerformance) performanceCriteria.get(pc)).countExample(example);
            }
        }
    }

    public void initApply() throws OperatorException {
        super.initApply();
        skipUndefinedLabels = getParameterAsBoolean("skip_undefined_labels");
    }

    public Class[] getInputClasses() {
        return INPUT_CLASSES;
    }

    public Class[] getOutputClasses() {
        return OUTPUT_CLASSES;
    }

    public List getParameterTypes() {
        List types = super.getParameterTypes();
        for (int i = 0; i < CRITERIA_NAMES.length; i++) {
            types.add(new ParameterTypeBoolean(CRITERIA_NAMES[i], "The performance criterion " + CRITERIA_NAMES[i] + " error.", false));
        }
        for (int i = 0; i < UniversalClassificationCriterion.NAME.length; i++) {
            types.add(new ParameterTypeBoolean(UniversalClassificationCriterion.NAME[i], "The classification performance criterion " + UniversalClassificationCriterion.NAME[i] + ".", false));
        }
        types.add(new ParameterTypeCategory("main_criterion", "The criterion used for comparing performance vectors.", allCriteriaNames, 0));
        types.add(new ParameterTypeBoolean("skip_undefined_labels", "If set to true, examples with undefined labels are skipped.", false));
        types.add(new ParameterTypeString("comparator_class", "Fully qualified classname of the PerformanceComparator implementation.", true));
        return types;
    }
}
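The class comment above names the comparator_class parameter as the hook for custom comparison strategies. This file does not show the PerformanceComparator type itself, so the following is only a minimal sketch under stated assumptions: it assumes PerformanceComparator is an interface with a single int-returning compare(PerformanceVector, PerformanceVector) method, analogous to java.util.Comparator. The calls size(), get(int), and getValue() do appear in the listing above; the class name WeightedSumComparator and the equal weighting are illustrative choices, not part of YALE.

package edu.udo.cs.yale.operator.performance;

/** Hypothetical comparator that ranks performance vectors by the (equally
 *  weighted) sum of all criterion values instead of by the main criterion
 *  alone. Sketch only: the shape of PerformanceComparator is assumed. */
public class WeightedSumComparator implements PerformanceComparator {

    public int compare(PerformanceVector pv1, PerformanceVector pv2) {
        // a larger weighted sum is considered the better vector
        return Double.compare(weightedSum(pv1), weightedSum(pv2));
    }

    /** Sums all criterion values; size(), get(int), and getValue() are used
     *  exactly as in PerformanceEvaluator.evaluate() above. */
    private double weightedSum(PerformanceVector pv) {
        double sum = 0.0d;
        for (int i = 0; i < pv.size(); i++) {
            sum += 1.0d * pv.get(i).getValue(); // weight 1.0 as a placeholder
        }
        return sum;
    }
}

If such a class were compiled onto the classpath, setting comparator_class to its fully qualified name would make initialisePerformanceVector() load it via Class.forName, check that it is assignable to PerformanceComparator, and install it with setComparator.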
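The static evaluate() method drives every criterion through a two-step protocol: startCounting(ExampleSet) resets it before the example loop, and countExample(Example) is called once per (non-skipped) example. The sketch below shows a criterion following that protocol; it assumes MeasuredPerformance is an abstract base class whose relevant hooks are exactly the startCounting, countExample, and getValue calls visible in this file. The real base class very likely declares further abstract methods (a name, formatting, and so on) that a compiling subclass would also have to provide, and the class name MeanDeviation is hypothetical.

package edu.udo.cs.yale.operator.performance;

import edu.udo.cs.yale.example.Example;
import edu.udo.cs.yale.example.ExampleSet;

/** Hypothetical measured criterion: mean absolute deviation between the true
 *  and the predicted label. Sketch only; see the assumptions stated above. */
public class MeanDeviation extends MeasuredPerformance {

    private double deviationSum;
    private int exampleCount;

    /** Called once by PerformanceEvaluator.evaluate() before the example loop. */
    public void startCounting(ExampleSet set) {
        deviationSum = 0.0d;
        exampleCount = 0;
    }

    /** Called once per example; labels are doubles, as the Double.isNaN
     *  checks in evaluate() above show. */
    public void countExample(Example example) {
        deviationSum += Math.abs(example.getLabel() - example.getPredictedLabel());
        exampleCount++;
    }

    /** Queried through PerformanceCriterion.getValue(), as in addPerformanceValue(). */
    public double getValue() {
        return (exampleCount == 0) ? Double.NaN : (deviationSum / exampleCount);
    }
}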
