📄 averagingresultproducer.java
字号:
// NOTE(review): this chunk opens mid-method — the code below is the tail of
// listOptions(); its signature and the construction of newVector lie outside
// the visible span.
    // Append the wrapped ResultProducer's own options, preceded by a
    // synthetic separator Option naming the producer class.
    if ((m_ResultProducer != null) &&
        (m_ResultProducer instanceof OptionHandler)) {
      newVector.addElement(new Option(
          "", "", 0, "\nOptions specific to result producer "
          + m_ResultProducer.getClass().getName() + ":"));
      Enumeration enu = ((OptionHandler)m_ResultProducer).listOptions();
      while (enu.hasMoreElements()) {
        newVector.addElement(enu.nextElement());
      }
    }
    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -F &lt;field name&gt;
   *  The name of the field to average over.
   *  (default "Fold")</pre>
   *
   * <pre> -X &lt;num results&gt;
   *  The number of results expected per average.
   *  (default 10)</pre>
   *
   * <pre> -S
   *  Calculate standard deviations.
   *  (default only averages)</pre>
   *
   * <pre> -W &lt;class name&gt;
   *  The full class name of a ResultProducer.
   *  eg: weka.experiment.CrossValidationResultProducer</pre>
   *
   * <pre>
   * Options specific to result producer weka.experiment.CrossValidationResultProducer:
   * </pre>
   *
   * <pre> -X &lt;number of folds&gt;
   *  The number of folds to use for the cross-validation.
   *  (default 10)</pre>
   *
   * <pre> -D
   *  Save raw split evaluator output.</pre>
   *
   * <pre> -O &lt;file/directory name/path&gt;
   *  The filename where raw output will be stored.
   *  If a directory name is specified then individual
   *  outputs will be gzipped, otherwise all output will be
   *  zipped to the named file. Use in conjunction with -D.
   *  (default splitEvalutorOut.zip)</pre>
   *
   * <pre> -W &lt;class name&gt;
   *  The full class name of a SplitEvaluator.
   *  eg: weka.experiment.ClassifierSplitEvaluator</pre>
   *
   * <pre>
   * Options specific to split evaluator weka.experiment.ClassifierSplitEvaluator:
   * </pre>
   *
   * <pre> -W &lt;class name&gt;
   *  The full class name of the classifier.
   *  eg: weka.classifiers.bayes.NaiveBayes</pre>
   *
   * <pre> -C &lt;index&gt;
   *  The index of the class for which IR statistics
   *  are to be output. (default 1)</pre>
   *
   * <pre> -I &lt;index&gt;
   *  The index of an attribute to output in the
   *  results. This attribute should identify an
   *  instance in order to know which instances are
   *  in the test set of a cross validation. if 0
   *  no output (default 0).</pre>
   *
   * <pre> -P
   *  Add target and prediction columns to the result
   *  for each fold.</pre>
   *
   * <pre>
   * Options specific to classifier weka.classifiers.rules.ZeroR:
   * </pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * All options after -- will be passed to the result producer.
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    // -F: key field to average over; defaults to the standard "Fold" field.
    String keyFieldName = Utils.getOption('F', options);
    if (keyFieldName.length() != 0) {
      setKeyFieldName(keyFieldName);
    } else {
      setKeyFieldName(CrossValidationResultProducer.FOLD_FIELD_NAME);
    }

    // -X: how many sub-results make up one average (default 10).
    String numResults = Utils.getOption('X', options);
    if (numResults.length() != 0) {
      setExpectedResultsPerAverage(Integer.parseInt(numResults));
    } else {
      setExpectedResultsPerAverage(10);
    }

    // -S: also record standard deviations, not just averages.
    setCalculateStdDevs(Utils.getFlag('S', options));

    // -W: the wrapped ResultProducer class is mandatory.
    String rpName = Utils.getOption('W', options);
    if (rpName.length() == 0) {
      throw new Exception("A ResultProducer must be specified with"
          + " the -W option.");
    }
    // Do it first without options, so if an exception is thrown during
    // the option setting, listOptions will contain options for the actual
    // RP.
    setResultProducer((ResultProducer)Utils.forName(
        ResultProducer.class,
        rpName,
        null));
    if (getResultProducer() instanceof OptionHandler) {
      // Everything after "--" is forwarded to the wrapped producer.
      ((OptionHandler) getResultProducer())
          .setOptions(Utils.partitionOptions(options));
    }
  }

  /**
   * Gets the current settings of the result producer.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    // Collect the wrapped producer's options first so we can size the array.
    String [] seOptions = new String [0];
    if ((m_ResultProducer != null) &&
        (m_ResultProducer instanceof OptionHandler)) {
      seOptions = ((OptionHandler)m_ResultProducer).getOptions();
    }

    // +8 leaves room for -F <v>, -X <v>, -S, -W <class>, and "--".
    String [] options = new String [seOptions.length + 8];
    int current = 0;

    options[current++] = "-F";
    options[current++] = "" + getKeyFieldName();
    options[current++] = "-X";
    options[current++] = "" + getExpectedResultsPerAverage();
    if (getCalculateStdDevs()) {
      options[current++] = "-S";
    }
    if (getResultProducer() != null) {
      options[current++] = "-W";
      options[current++] = getResultProducer().getClass().getName();
    }
    // Separator: everything after "--" belongs to the wrapped producer.
    options[current++] = "--";

    System.arraycopy(seOptions, 0, options, current,
        seOptions.length);
    current += seOptions.length;
    // Pad unused slots (e.g. when -S or -W were omitted) with empty strings.
    while (current < options.length) {
      options[current++] = "";
    }
    return options;
  }

  /**
   * Set a list of method names for additional measures to look for
   * in SplitEvaluators. This could contain many measures (of which only a
   * subset may be produceable by the current resultProducer) if an experiment
   * is the type that iterates over a set of properties.
   *
   * @param additionalMeasures an array of measure names, null if none
   */
  public void setAdditionalMeasures(String [] additionalMeasures) {
    m_AdditionalMeasures = additionalMeasures;

    // Propagate the request down to the wrapped producer.
    if (m_ResultProducer != null) {
      System.err.println("AveragingResultProducer: setting additional "
          +"measures for "
          +"ResultProducer");
      m_ResultProducer.setAdditionalMeasures(m_AdditionalMeasures);
    }
  }

  /**
   * Returns an enumeration of any additional measure names that might be
   * in the result producer.
   *
   * @return an enumeration of the measure names (empty if the wrapped
   *         producer is not an AdditionalMeasureProducer)
   */
  public Enumeration enumerateMeasures() {
    Vector newVector = new Vector();
    if (m_ResultProducer instanceof AdditionalMeasureProducer) {
      Enumeration en = ((AdditionalMeasureProducer)m_ResultProducer).
        enumerateMeasures();
      while (en.hasMoreElements()) {
        String mname = (String)en.nextElement();
        newVector.addElement(mname);
      }
    }
    return newVector.elements();
  }

  /**
   * Returns the value of the named measure, delegating to the wrapped
   * result producer.
   *
   * @param additionalMeasureName the name of the measure to query for its value
   * @return the value of the named measure
   * @throws IllegalArgumentException if the named measure is not supported
   */
  public double getMeasure(String additionalMeasureName) {
    if (m_ResultProducer instanceof AdditionalMeasureProducer) {
      return ((AdditionalMeasureProducer)m_ResultProducer).
        getMeasure(additionalMeasureName);
    } else {
      throw new IllegalArgumentException("AveragingResultProducer: "
          +"Can't return value for : "+additionalMeasureName
          +". "+m_ResultProducer.getClass().getName()+" "
          +"is not an AdditionalMeasureProducer");
    }
  }

  /**
   * Sets the dataset that results will be obtained for.
   *
   * @param instances a value of type 'Instances'.
   */
  public void setInstances(Instances instances) {
    m_Instances = instances;
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String calculateStdDevsTipText() {
    return "Record standard deviations for each run.";
  }

  /**
   * Get the value of CalculateStdDevs.
   *
   * @return Value of CalculateStdDevs.
   */
  public boolean getCalculateStdDevs() {
    return m_CalculateStdDevs;
  }

  /**
   * Set the value of CalculateStdDevs.
   *
   * @param newCalculateStdDevs Value to assign to CalculateStdDevs.
   */
  public void setCalculateStdDevs(boolean newCalculateStdDevs) {
    m_CalculateStdDevs = newCalculateStdDevs;
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String expectedResultsPerAverageTipText() {
    return "Set the expected number of results to average per run. "
      +"For example if a CrossValidationResultProducer is being used "
      +"(with the number of folds set to 10), then the expected number "
      +"of results per run is 10.";
  }

  /**
   * Get the value of ExpectedResultsPerAverage.
   *
   * @return Value of ExpectedResultsPerAverage.
   */
  public int getExpectedResultsPerAverage() {
    return m_ExpectedResultsPerAverage;
  }

  /**
   * Set the value of ExpectedResultsPerAverage.
   *
   * @param newExpectedResultsPerAverage Value to assign to
   *        ExpectedResultsPerAverage.
   */
  public void setExpectedResultsPerAverage(int newExpectedResultsPerAverage) {
    m_ExpectedResultsPerAverage = newExpectedResultsPerAverage;
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String keyFieldNameTipText() {
    return "Set the field name that will be unique for a run.";
  }

  /**
   * Get the value of KeyFieldName.
   *
   * @return Value of KeyFieldName.
   */
  public String getKeyFieldName() {
    return m_KeyFieldName;
  }

  /**
   * Set the value of KeyFieldName. Also derives the companion count field
   * name ("Num_" + key) and refreshes the cached key-column index.
   *
   * @param newKeyFieldName Value to assign to KeyFieldName.
   */
  public void setKeyFieldName(String newKeyFieldName) {
    m_KeyFieldName = newKeyFieldName;
    m_CountFieldName = "Num_" + m_KeyFieldName;
    findKeyIndex();
  }

  /**
   * Sets the object to send results of each run to.
   *
   * @param listener a value of type 'ResultListener'
   */
  public void setResultListener(ResultListener listener) {
    m_ResultListener = listener;
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String resultProducerTipText() {
    return "Set the resultProducer for which results are to be averaged.";
  }

  /**
   * Get the ResultProducer.
   *
   * @return the ResultProducer.
   */
  public ResultProducer getResultProducer() {
    return m_ResultProducer;
  }

  /**
   * Set the ResultProducer. Registers this object as the producer's
   * listener so its results flow back here for averaging, and refreshes
   * the cached key-column index.
   *
   * @param newResultProducer new ResultProducer to use.
   */
  public void setResultProducer(ResultProducer newResultProducer) {
    m_ResultProducer = newResultProducer;
    m_ResultProducer.setResultListener(this);
    findKeyIndex();
  }

  /**
   * Gets a text description of the result producer.
   *
   * @return a text description of the result producer.
   */
  public String toString() {
    String result = "AveragingResultProducer: ";
    result += getCompatibilityState();
    if (m_Instances == null) {
      result += ": <null Instances>";
    } else {
      result += ": " + Utils.backQuoteChars(m_Instances.relationName());
    }
    return result;
  }
} // AveragingResultProducer
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -