📄 Dagging.java
   *  Random number seed.
   *  (default 1)</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -W
   *  Full name of base classifier.
   *  (default: weka.classifiers.functions.SMO)</pre>
   *
   * <pre>
   * Options specific to classifier weka.classifiers.functions.SMO:
   * </pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -no-checks
   *  Turns off all checks - use with caution!
   *  Turning them off assumes that data is purely numeric, doesn't
   *  contain any missing values, and has a nominal class. Turning them
   *  off also means that no header information will be stored if the
   *  machine is linear. Finally, it also assumes that no instance has
   *  a weight equal to 0.
   *  (default: checks on)</pre>
   *
   * <pre> -C <double>
   *  The complexity constant C. (default 1)</pre>
   *
   * <pre> -N
   *  Whether to 0=normalize/1=standardize/2=neither. (default 0=normalize)</pre>
   *
   * <pre> -L <double>
   *  The tolerance parameter. (default 1.0e-3)</pre>
   *
   * <pre> -P <double>
   *  The epsilon for round-off error. (default 1.0e-12)</pre>
   *
   * <pre> -M
   *  Fit logistic models to SVM outputs. </pre>
   *
   * <pre> -V <double>
   *  The number of folds for the internal
   *  cross-validation. (default -1, use training data)</pre>
   *
   * <pre> -W <double>
   *  The random number seed. (default 1)</pre>
   *
   * <pre> -K <classname and parameters>
   *  The Kernel to use.
   *  (default: weka.classifiers.functions.supportVector.PolyKernel)</pre>
   *
   * <pre>
   * Options specific to kernel weka.classifiers.functions.supportVector.PolyKernel:
   * </pre>
   *
   * <pre> -D
   *  Enables debugging output (if available) to be printed.
   *  (default: off)</pre>
   *
   * <pre> -no-checks
   *  Turns off all checks - use with caution!
   *  (default: checks on)</pre>
   *
   * <pre> -C <num>
   *  The size of the cache (a prime number).
   *  (default: 250007)</pre>
   *
   * <pre> -E <num>
   *  The Exponent to use.
   *  (default: 1.0)</pre>
   *
   * <pre> -L
   *  Use lower-order terms.
   *  (default: no)</pre>
   *
   <!-- options-end -->
   *
   * Options after -- are passed to the designated classifier.<p>
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    String tmpStr;

    tmpStr = Utils.getOption('F', options);
    if (tmpStr.length() != 0)
      setNumFolds(Integer.parseInt(tmpStr));
    else
      setNumFolds(10);

    setVerbose(Utils.getFlag("verbose", options));

    super.setOptions(options);
  }

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    Vector result;
    String[] options;
    int i;

    result = new Vector();

    result.add("-F");
    result.add("" + getNumFolds());

    if (getVerbose())
      result.add("-verbose");

    options = super.getOptions();
    for (i = 0; i < options.length; i++)
      result.add(options[i]);

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Gets the number of folds to use for splitting the training set.
   *
   * @return the number of folds
   */
  public int getNumFolds() {
    return m_NumFolds;
  }

  /**
   * Sets the number of folds to use for splitting the training set.
   *
   * @param value the new number of folds
   */
  public void setNumFolds(int value) {
    if (value > 0)
      m_NumFolds = value;
    else
      System.out.println(
          "At least 1 fold is necessary (provided: " + value + ")!");
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String numFoldsTipText() {
    return "The number of folds to use for splitting the training set into smaller chunks for the base classifier.";
  }

  /**
   * Sets the verbose state.
   *
   * @param value the verbose state
   */
  public void setVerbose(boolean value) {
    m_Verbose = value;
  }

  /**
   * Gets the verbose state.
   *
   * @return the verbose state
   */
  public boolean getVerbose() {
    return m_Verbose;
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String verboseTipText() {
    return "Whether to output some additional information during building.";
  }

  /**
   * Bagging method.
   *
   * @param data the training data to be used for generating the
   *             bagged classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {
    Classifier[] base;
    int i;
    int n;
    int fromIndex;
    int toIndex;
    Instances train;
    double chunkSize;

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    m_Vote = new Vote();
    base = new Classifier[getNumFolds()];
    chunkSize = (double) data.numInstances() / (double) getNumFolds();

    // stratify data
    if (getNumFolds() > 1)
      data.stratify(getNumFolds());

    // generate <folds> classifiers
    for (i = 0; i < getNumFolds(); i++) {
      base[i] = makeCopy(getClassifier());

      // generate training data
      if (getNumFolds() > 1) {
        // some progress information
        if (getVerbose())
          System.out.print(".");

        train = new Instances(data, 0);
        fromIndex = (int) ((double) i * chunkSize);
        toIndex = (int) (((double) i + 1) * chunkSize) - 1;
        if (i == getNumFolds() - 1)
          toIndex = data.numInstances() - 1;
        for (n = fromIndex; n < toIndex; n++)
          train.add(data.instance(n));
      }
      else {
        train = data;
      }

      // train classifier
      base[i].buildClassifier(train);
    }

    // init vote
    m_Vote.setClassifiers(base);

    if (getVerbose())
      System.out.println();
  }

  /**
   * Calculates the class membership probabilities for the given test
   * instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @throws Exception if distribution can't be computed successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {
    return m_Vote.distributionForInstance(instance);
  }

  /**
   * Returns description of the classifier.
   *
   * @return description of the classifier as a string
   */
  public String toString() {
    if (m_Vote == null)
      return this.getClass().getName().replaceAll(".*\\.", "")
        + ": No model built yet.";
    else
      return m_Vote.toString();
  }

  /**
   * Main method for testing this class.
   *
   * @param args the options
   */
  public static void main(String[] args) {
    runClassifier(new Dagging(), args);
  }
}
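
For context, the listing above is the tail end of Weka's Dagging meta-classifier: buildClassifier() stratifies the training data, splits it into getNumFolds() disjoint chunks, trains one copy of the base classifier per chunk, and wires the resulting models into a Vote ensemble, to which distributionForInstance() delegates at prediction time. The snippet below is a minimal usage sketch, not part of the file; it assumes the class is reachable as weka.classifiers.meta.Dagging (its package in older Weka releases; adjust the import to wherever Dagging lives in your build) and that data.arff is a placeholder dataset whose last attribute is the class.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.functions.SMO;
import weka.classifiers.meta.Dagging;  // assumed package; adjust to your Weka build
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class DaggingExample {
  public static void main(String[] args) throws Exception {
    // "data.arff" is a placeholder dataset; use the last attribute as the class.
    Instances data = DataSource.read("data.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // Configure Dagging: 10 disjoint folds, SMO as the base classifier
    // (SMO is also the documented default, see the -W option above).
    Dagging dagging = new Dagging();
    dagging.setNumFolds(10);
    dagging.setVerbose(true);
    dagging.setClassifier(new SMO());

    // 10-fold cross-validation of the ensemble itself.
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(dagging, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
  }
}

From the command line, roughly the same configuration would be java weka.classifiers.meta.Dagging -F 10 -verbose -W weka.classifiers.functions.SMO -t data.arff, where -F and -verbose are the options parsed in setOptions() above.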