AODE.java
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* AODE.java
* Copyright (C) 2003
* Algorithm developed by: Geoff Webb
* Code written by: Janice Boughton & Zhihai Wang
*/
package weka.classifiers.bayes;
import java.util.Enumeration;
import java.util.Vector;
import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
/**
* AODE achieves highly accurate classification by averaging over all
* of a small space of alternative naive-Bayes-like models that have
* weaker (and hence less detrimental) independence assumptions than
* naive Bayes. The resulting algorithm is computationally efficient while
* delivering highly accurate classification on many learning tasks.<br>
* For more information, see<p>
* G. Webb, J. Boughton & Z. Wang (2003). <i>Not So Naive Bayes.</i>
* Submitted for publication<br>
* G. Webb, J. Boughton & Z. Wang (2002). <i>Averaged One-Dependence
* Estimators: Preliminary Results.</i> AI2002 Data Mining Workshop, Canberra.
*
* Valid options are:<p>
*
* -D <br>
* Debugging information is printed if this flag is specified.<p>
*
* -F <br>
* Specify the frequency limit for parent attributes.<p>
*
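* A minimal usage sketch follows (the ARFF file name, class index and the
* "-F 30" option value below are illustrative assumptions, not taken from the
* original source):<pre>
*   Instances data = new Instances(
*       new java.io.BufferedReader(new java.io.FileReader("weather.nominal.arff")));
*   data.setClassIndex(data.numAttributes() - 1);   // class is the last attribute
*   AODE aode = new AODE();
*   aode.setOptions(new String[] {"-F", "30"});     // frequency limit for superParents
*   aode.buildClassifier(data);
*   double [] dist = aode.distributionForInstance(data.instance(0));
* </pre><p>
*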
* @author Janice Boughton (jrbought@csse.monash.edu.au) & Zhihai Wang (zhw@csse.monash.edu.au)
* @version $Revision$
*/
public class AODE extends Classifier
implements OptionHandler, WeightedInstancesHandler {
/** for serialization */
private static final long serialVersionUID = 4788178436124501440L;
/**
* 3D array (m_NumClasses * m_TotalAttValues * m_TotalAttValues)
* of attribute counts
*/
private double [][][] m_CondiCounts;
/** The number of times each class value occurs in the dataset */
private double [] m_ClassCounts;
/** The sums of attribute-class counts
* -- if there are no missing values for att, then m_SumForCounts[classVal][att] will
* be the same as m_ClassCounts[classVal]
*/
private double [][] m_SumForCounts;
/** The number of classes */
private int m_NumClasses;
/** The number of attributes in dataset, including class */
private int m_NumAttributes;
/** The number of instances in the dataset */
private int m_NumInstances;
/** The index of the class attribute */
private int m_ClassIndex;
/** The dataset */
private Instances m_Instances;
/**
* The total number of values for all attributes (not including
* class). Eg. for three atts each with two possible values,
* m_TotalAttValues would be 6.
* This variable is used when allocating space for m_CondiCounts matrix.
*/
private int m_TotalAttValues;
/** The starting index (in the m_CondiCounts matrix) of each attribute */
private int [] m_StartAttIndex;
/** The number of values for each attribute */
private int [] m_NumAttValues;
/** The frequency of each attribute value for the dataset */
private double [] m_Frequencies;
/** The number of valid class values observed in dataset
* -- with no missing classes, this number is the same as m_NumInstances.
*/
private double m_SumInstances;
/** An att's frequency must be this value or more to be a superParent */
private int m_Limit;
/** If true, outputs debugging info */
private boolean m_Debug = false;
/**
* Returns a string describing this classifier
* @return a description of the classifier suitable for
* displaying in the explorer/experimenter gui
*/
public String globalInfo() {
return "AODE achieves highly accurate classification by averaging over all "
+"of a small space of alternative naive-Bayes-like models that have "
+"weaker (and hence less detrimental) independence assumptions than "
+"naive Bayes. The resulting algorithm is computationally efficient while "
+"delivering highly accurate classification on many learning tasks.\n\n"
+"For more information, see\n\n"
+"G. Webb, J. Boughton & Z. Wang (2003). Not So Naive Bayes. "
+"Submitted for publication "
+"G. Webb, J. Boughton & Z. Wang (2002). <i>Averaged One-Dependence "
+"Estimators: Preliminary Results. AI2002 Data Mining Workshop, Canberra.";
}
/**
* Generates the classifier.
*
* @param instances set of instances serving as training data
* @exception Exception if the classifier has not been generated
* successfully
*/
public void buildClassifier(Instances instances) throws Exception {
// reset variable for this fold
m_SumInstances = 0;
m_NumClasses = instances.numClasses();
if(m_NumClasses < 2) {
throw new Exception ("Dataset has no class attribute");
}
if(instances.classAttribute().isNumeric()) {
throw new Exception("AODE: Class is numeric!");
}
if(instances.checkForStringAttributes()) {
throw new Exception("AODE: String attributes are not allowed.");
}
m_ClassIndex = instances.classIndex();
m_NumAttributes = instances.numAttributes();
for(int att = 0; att < m_NumAttributes; att++) {
Attribute attribute = (Attribute)instances.attribute(att);
if(!attribute.isNominal()) {
throw new Exception("Attributes must be nominal. " +
"Discretize dataset with FilteredClassifer.");
}
}
// copy the instances
m_Instances = instances;
m_NumInstances = m_Instances.numInstances();
// allocate space for attribute reference arrays
m_StartAttIndex = new int[m_NumAttributes];
m_NumAttValues = new int[m_NumAttributes];
m_TotalAttValues = 0;
for(int i = 0; i < m_NumAttributes; i++) {
if(i != m_ClassIndex) {
m_StartAttIndex[i] = m_TotalAttValues;
m_NumAttValues[i] = m_Instances.attribute(i).numValues();
m_TotalAttValues += m_NumAttValues[i];
} else {
// m_StartAttIndex[i] = -1; // class isn't included
m_NumAttValues[i] = m_NumClasses;
}
}
// allocate space for counts and frequencies
m_CondiCounts = new double[m_NumClasses][m_TotalAttValues][m_TotalAttValues];
m_ClassCounts = new double[m_NumClasses];
m_SumForCounts = new double[m_NumClasses][m_NumAttributes];
m_Frequencies = new double[m_TotalAttValues];
// Calculate the counts
for(int k = 0; k < m_NumInstances; k++) {
addToCounts((Instance)m_Instances.instance(k));
}
// free up some space
m_Instances = new Instances(m_Instances, 0);
}
/**
* Puts an instance's values into m_CondiCounts, m_ClassCounts and
* m_SumInstances.
*
* @param instance the instance whose values are to be put into the counts variables
*
*/
private void addToCounts(Instance instance) {
double [] pointer;
int classVal = (int)instance.classValue();
double weight = instance.weight();
m_ClassCounts[classVal] += weight;
m_SumInstances += weight;
// store instance's att vals in an int array, b/c accessing it in the loop(s) is more efficient
int [] attIndex = new int[m_NumAttributes];
for(int i = 0; i < m_NumAttributes; i++) {
if(instance.isMissing(i) || i == m_ClassIndex)
attIndex[i] = -1;
else
attIndex[i] = m_StartAttIndex[i] + (int)instance.value(i);
}
for(int Att1 = 0; Att1 < m_NumAttributes; Att1++) {
if(attIndex[Att1] == -1)
continue; // avoid pointless looping
m_Frequencies[attIndex[Att1]] += weight;
m_SumForCounts[classVal][Att1] += weight;
// save time by referencing this now, rather than do it repeatedly in the loop
pointer = m_CondiCounts[classVal][attIndex[Att1]];
for(int Att2 = 0; Att2 < m_NumAttributes; Att2++) {
if(attIndex[Att2] != -1) {
pointer[attIndex[Att2]] += weight;
}
}
}
}
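/*
 * Index layout example (illustrative, not from the original source): with the
 * class attribute last and three other attributes having 2, 3 and 2 values
 * respectively, m_StartAttIndex = {0, 2, 5} and m_TotalAttValues = 7.
 * addToCounts() then accumulates the weighted count of
 * (class = c, att1 = v, att2 = w) in m_CondiCounts[c][2 + v][5 + w], while the
 * diagonal entry m_CondiCounts[c][x][x] holds the count of attribute value x
 * occurring with class c on its own.
 */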
/**
* Calculates the class membership probabilities for the given test
* instance.
*
* @param instance the instance to be classified
* @return predicted class probability distribution
* @exception Exception if there is a problem generating the prediction
*/
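/*
 * The remainder of the original source file is not included in this listing.
 * The method below is a minimal sketch, written for illustration only, of how
 * an AODE-style distribution can be formed from the count tables built above:
 * Laplace-smoothed one-dependence estimates P(y, x_p) * prod_i P(x_i | y, x_p)
 * are averaged over every parent attribute value whose frequency reaches
 * m_Limit, with a simple naive-Bayes-style fallback when no parent qualifies.
 * It is an assumption for illustration, not the original Weka method body.
 */
public double [] distributionForInstance(Instance instance) throws Exception {
  double [] probs = new double[m_NumClasses];

  // map the instance's attribute values to m_CondiCounts indices (-1 = class or missing)
  int [] attIndex = new int[m_NumAttributes];
  for(int att = 0; att < m_NumAttributes; att++) {
    if(instance.isMissing(att) || att == m_ClassIndex)
      attIndex[att] = -1;
    else
      attIndex[att] = m_StartAttIndex[att] + (int)instance.value(att);
  }

  for(int classVal = 0; classVal < m_NumClasses; classVal++) {
    double sum = 0;
    int parentCount = 0;

    // each sufficiently frequent attribute value takes a turn as the superParent
    for(int parent = 0; parent < m_NumAttributes; parent++) {
      int pIndex = attIndex[parent];
      if(pIndex == -1 || m_Frequencies[pIndex] < m_Limit)
        continue;
      parentCount++;

      // Laplace-smoothed estimate of P(y, x_parent)
      double spode = (m_CondiCounts[classVal][pIndex][pIndex] + 1.0)
          / (m_SumInstances + m_NumClasses * m_NumAttValues[parent]);

      // multiply in P(x_child | y, x_parent) for every other known attribute value
      for(int child = 0; child < m_NumAttributes; child++) {
        int cIndex = attIndex[child];
        if(cIndex == -1 || child == parent)
          continue;
        spode *= (m_CondiCounts[classVal][pIndex][cIndex] + 1.0)
            / (m_CondiCounts[classVal][pIndex][pIndex] + m_NumAttValues[child]);
      }
      sum += spode;
    }

    if(parentCount > 0) {
      probs[classVal] = sum / parentCount;
    } else {
      // no qualifying parent: fall back to a naive-Bayes-style estimate
      probs[classVal] = (m_ClassCounts[classVal] + 1.0)
          / (m_SumInstances + m_NumClasses);
      for(int att = 0; att < m_NumAttributes; att++) {
        int aIndex = attIndex[att];
        if(aIndex == -1)
          continue;
        probs[classVal] *= (m_CondiCounts[classVal][aIndex][aIndex] + 1.0)
            / (m_SumForCounts[classVal][att] + m_NumAttValues[att]);
      }
    }
  }
  Utils.normalize(probs);
  return probs;
}
}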