
📄 y45.java

📁 Java source code of the Y45 operator from the YALE data mining toolkit: a C4.5-style decision tree learner. A useful reference for researchers who want to study and improve the tree induction algorithm.
💻 JAVA
/*
 *  YALE - Yet Another Learning Environment
 *  Copyright (C) 2001-2004
 *      Simon Fischer, Ralf Klinkenberg, Ingo Mierswa, 
 *          Katharina Morik, Oliver Ritthoff
 *      Artificial Intelligence Unit
 *      Computer Science Department
 *      University of Dortmund
 *      44221 Dortmund,  Germany
 *  email: yale-team@lists.sourceforge.net
 *  web:   http://yale.cs.uni-dortmund.de/
 *
 *  This program is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU General Public License as 
 *  published by the Free Software Foundation; either version 2 of the
 *  License, or (at your option) any later version. 
 *
 *  This program is distributed in the hope that it will be useful, but
 *  WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 *  General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 *  USA.
 */
package edu.udo.cs.yale.operator.learner.decisiontree.y45;

import edu.udo.cs.yale.operator.parameter.*;
import edu.udo.cs.yale.operator.learner.AbstractLearner;
import edu.udo.cs.yale.operator.learner.Model;
import edu.udo.cs.yale.operator.learner.weka.WekaClassifier;
import edu.udo.cs.yale.operator.OperatorException;
import edu.udo.cs.yale.operator.UserError;
import edu.udo.cs.yale.example.Attribute;
import edu.udo.cs.yale.example.Example;
import edu.udo.cs.yale.example.ExampleReader;
import edu.udo.cs.yale.example.ExampleSet;
import edu.udo.cs.yale.tools.Ontology;
import edu.udo.cs.yale.tools.RandomGenerator;
import edu.udo.cs.yale.tools.LogService;
import edu.udo.cs.yale.tools.WekaTools;

import weka.classifiers.Classifier;
import weka.core.Instances;

import java.util.List;
import java.util.LinkedList;
import java.util.Iterator;

/** Y45 creates a pruned or unpruned decision tree like C4.5 (<a TARGET="_top" href="http://www.cse.unsw.edu.au/~quinlan/">C4.5</a> program version 8 
 *  by Ross Quinlan {@yale.cite Quinlan/93b}).
 *  This operator uses the J48 classifier from the
 *  <a href="http://www.cs.waikato.ac.nz/~ml/weka/">Weka</a> package 3.4.1. <br/> 
 *  Instead of using the Weka classes directly, an internal implementation is used. This allows 
 *  Weka-independent adaptations of the tree induction algorithm.
 *
 *  @version $Id: Y45.java,v 1.5 2004/09/10 21:52:49 ingomierswa Exp $
 */
public class Y45 extends AbstractLearner {

    public Model learn(ExampleSet exampleSet) throws OperatorException {
	// create the internal J48 implementation and pass the user defined
	// parameters on in Weka's command line option format
	J48 j48 = new J48();
	try {
	    j48.setOptions(getWekaParameters());
	} catch (Exception e) {
	    throw new UserError(this, e, 904, new Object[] { "Internal J48", e });
	}

	// convert the YALE example set into Weka instances and build the tree
	LogService.logMessage(getName() + ": Converting to Weka instances.", LogService.MINIMUM);
	Instances instances = WekaTools.toWekaInstances(exampleSet, "TempInstances", exampleSet.getLabel(), true);
	try {
	    LogService.logMessage(getName() + ": Building Weka classifier.", LogService.MINIMUM);
	    j48.buildClassifier(instances);
	} catch (Exception e) {
	    throw new UserError(this, e, 905, new Object[] { "Internal J48", e });
	}

	// wrap the trained tree in a model; if use_distribution is set the
	// model predicts class confidences instead of crisp class values
	boolean useDist = getParameterAsBoolean("use_distribution");
	return new WekaClassifier(exampleSet.getLabel(), j48, useDist);
    }

    private String[] getWekaParameters() {
	// translate the YALE parameters into Weka style command line options
	// understood by the internal J48 implementation
	List parameters = new LinkedList();
	if (getParameterAsBoolean("use_laplace")) parameters.add("-A");
	if (getParameterAsBoolean("use_unpruned")) {
	    parameters.add("-U");
	} else {
	    // these parameters only make sense for pruned trees
	    if (getParameterAsBoolean("use_reduced_error_pruning")) {
		parameters.add("-R");
		parameters.add("-N");
		parameters.add(getParameterAsInt("reduced_folds") + "");
	    } else {
		parameters.add("-C");
		parameters.add(getParameterAsDouble("confidence") + "");
	    }
	    if (getParameterAsBoolean("no_subtree_raising")) parameters.add("-S"); 
	}
	if (getParameterAsBoolean("use_binary_splits")) parameters.add("-B"); 
	if (getParameterAsBoolean("no_cleanup")) parameters.add("-L"); 
	
	// the minimum leaf size and the random seed are always passed on
	parameters.add("-M");
	parameters.add(getParameterAsInt("min_leaf_size") + "");
	parameters.add("-Q");
	parameters.add(RandomGenerator.getGlobalRandomGenerator().getSeed() + "");

	String[] result = new String[parameters.size()];
	parameters.toArray(result);
	return result;
    }
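
    /* For illustration (not part of the original file): with the default
     * parameter values defined in getParameterTypes() below, getWekaParameters()
     * returns
     *     { "-C", "0.25", "-M", "2", "-Q", "<global random seed>" }
     * i.e. a pruned tree with pruning confidence 0.25 and at least two examples
     * per leaf. Enabling use_reduced_error_pruning with reduced_folds = 3 would
     * instead yield { "-R", "-N", "3", "-M", "2", "-Q", "<global random seed>" }.
     */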

    public List getParameterTypes() {
	List types = super.getParameterTypes();
 	types.add(new ParameterTypeBoolean("use_distribution", "If set to true, the prediction of the model will not be the class, but the confidence for that class.", false));
 	types.add(new ParameterTypeBoolean("use_unpruned", "If set to true, an unpruned tree is created.", false));
 	types.add(new ParameterTypeDouble("confidence", "Set confidence threshold for pruning.", 0.0d, 1.0d, 0.25));
 	types.add(new ParameterTypeInt("min_leaf_size", "Set minimum number of instances per leaf.", 1, Integer.MAX_VALUE, 2));
 	types.add(new ParameterTypeBoolean("use_laplace", "Determines whether probabilities are smoothed using Laplace correction when predictions are generated.", false));
 	types.add(new ParameterTypeBoolean("use_reduced_error_pruning", "Use reduced error pruning. No subtree raising is performed.", false));
 	types.add(new ParameterTypeInt("reduced_folds", "Set number of folds for reduced error pruning. One fold is used as the pruning set.", 1, Integer.MAX_VALUE, 3));
 	types.add(new ParameterTypeBoolean("use_binary_splits", "Use binary splits for nominal attributes.", false));
 	types.add(new ParameterTypeBoolean("no_subtree_raising", "Use subtree raising.", false));
 	types.add(new ParameterTypeBoolean("no_cleanup", "Cleanup after building of tree.", false));
	return types;
    }
}
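
/* A minimal usage sketch (not part of the original file; assumptions noted):
 * the javadoc above states that the internal J48 mirrors the J48 classifier of
 * the Weka 3.4 package, so the option array built by getWekaParameters() can
 * also be tried directly against Weka's own tree, assumed here to live at
 * weka.classifiers.trees.J48. The ARFF file name below is hypothetical.
 *
 *     import weka.classifiers.trees.J48;
 *     import weka.core.Instances;
 *     import java.io.FileReader;
 *
 *     Instances data = new Instances(new FileReader("iris.arff"));
 *     data.setClassIndex(data.numAttributes() - 1);   // last attribute is the class label
 *     J48 tree = new J48();
 *     tree.setOptions(new String[] { "-C", "0.25", "-M", "2" });   // Y45's default flags
 *     tree.buildClassifier(data);
 *     System.out.println(tree);                        // prints the induced tree
 */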
