PredictionAppender.java

MacroWeka extends the well-known data mining tool weka
Java
Page 1 of 2
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    PredictionAppender.java
 *    Copyright (C) 2003 Mark Hall
 *
 */

package weka.gui.beans;

import java.io.Serializable;
import java.util.Vector;
import java.util.Enumeration;
import javax.swing.JPanel;
import javax.swing.JLabel;
import javax.swing.JTextField;
import java.awt.BorderLayout;
import javax.swing.SwingConstants;
import java.awt.*;
import java.beans.EventSetDescriptor;

import weka.core.Instances;
import weka.core.Instance;
import weka.clusterers.DensityBasedClusterer;

/**
 * Bean that can accept batch or incremental classifier events
 * and produce dataset or instance events which contain instances with
 * predictions appended.
 *
 * @author <a href="mailto:mhall@cs.waikato.ac.nz">Mark Hall</a>
 * @version $Revision: 1.1 $
 */
public class PredictionAppender extends JPanel
  implements DataSource, Visible, BeanCommon,
	     EventConstraints, BatchClassifierListener,
	     IncrementalClassifierListener, BatchClustererListener, Serializable {

  /**
   * Objects listening for dataset events
   */
  protected Vector m_dataSourceListeners = new Vector();

  /**
   * Objects listening for instances events
   */
  protected Vector m_instanceListeners = new Vector();

  /**
   * Non null if this object is a target for any events.
   */
  protected Object m_listenee = null;

  /**
   * Format of instances to be produced.
   */
  protected Instances m_format;

  protected BeanVisual m_visual = 
    new BeanVisual("PredictionAppender", 
		   BeanVisual.ICON_PATH+"PredictionAppender.gif",
		   BeanVisual.ICON_PATH+"PredictionAppender_animated.gif");

  /**
   * Append classifier's predicted probabilities (if the class is discrete
   * and the classifier is a distribution classifier)
   */
  protected boolean m_appendProbabilities;

  protected transient weka.gui.Logger m_logger;

  /**
   * Global description of this bean
   *
   * @return a <code>String</code> value
   */
  public String globalInfo() {
    return "Accepts batch or incremental classifier events and "
      +"produces a new data set with classifier predictions appended.";
  }
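
  /*
   * Illustrative sketch (not part of the original source): how this bean
   * might be wired up programmatically outside the KnowledgeFlow GUI.
   * Upstream Classifier beans normally deliver the classifier events; only
   * the listener side is shown here, using methods defined in this class
   * and the DataSetEvent.getDataSet() accessor used further below.
   *
   *   PredictionAppender pa = new PredictionAppender();
   *   pa.setAppendPredictedProbabilities(true); // distributions for nominal classes
   *   pa.addDataSourceListener(new DataSourceListener() {
   *     public void acceptDataSet(DataSetEvent dse) {
   *       System.out.println(dse.getDataSet()); // data set with predictions appended
   *     }
   *   });
   */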

  /**
   * Creates a new <code>PredictionAppender</code> instance.
   */
  public PredictionAppender() {
    setLayout(new BorderLayout());
    add(m_visual, BorderLayout.CENTER);
  }

  /**
   * Return a tip text suitable for displaying in a GUI
   *
   * @return a <code>String</code> value
   */
  public String appendPredictedProbabilitiesTipText() {
    return "append probabilities rather than labels for discrete class "
      +"predictions";
  }

  /**
   * Return true if predicted probabilities are to be appended rather
   * than the class value
   *
   * @return a <code>boolean</code> value
   */
  public boolean getAppendPredictedProbabilities() {
    return m_appendProbabilities;
  }

  /**
   * Set whether to append predicted probabilities rather than
   * the class value (for discrete class data sets)
   *
   * @param ap a <code>boolean</code> value
   */
  public void setAppendPredictedProbabilities(boolean ap) {
    m_appendProbabilities = ap;
  }
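
  /*
   * Illustrative note (not part of the original source): the effect of the
   * flag set above on the outgoing instances, as assembled in the
   * acceptClassifier() methods below.  For a nominal class with labels
   * {a, b, c}:
   *
   *   m_appendProbabilities == false (or a numeric class):
   *     [ att_1, ..., att_n, predictedClassValue ]
   *
   *   m_appendProbabilities == true:
   *     [ att_1, ..., att_n, p(a), p(b), p(c) ]
   */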

  /**
   * Add a datasource listener
   *
   * @param dsl a <code>DataSourceListener</code> value
   */
  public synchronized void addDataSourceListener(DataSourceListener dsl) {
    m_dataSourceListeners.addElement(dsl);
    // pass on any format that we might have determined so far
    if (m_format != null) {
      DataSetEvent e = new DataSetEvent(this, m_format);
      dsl.acceptDataSet(e);
    }
  }
  
  /**
   * Remove a datasource listener
   *
   * @param dsl a <code>DataSourceListener</code> value
   */
  public synchronized void removeDataSourceListener(DataSourceListener dsl) {
    m_dataSourceListeners.remove(dsl);
  }

  /**
   * Add an instance listener
   *
   * @param dsl an <code>InstanceListener</code> value
   */
  public synchronized void addInstanceListener(InstanceListener dsl) {
    m_instanceListeners.addElement(dsl);
    // pass on any format that we might have determined so far
    if (m_format != null) {
      InstanceEvent e = new InstanceEvent(this, m_format);
      dsl.acceptInstance(e);
    }
  }
  
  /**
   * Remove an instance listener
   *
   * @param dsl an <code>InstanceListener</code> value
   */
  public synchronized void removeInstanceListener(InstanceListener dsl) {
    m_instanceListeners.remove(dsl);
  }

  /**
   * Set the visual for this data source
   *
   * @param newVisual a <code>BeanVisual</code> value
   */
  public void setVisual(BeanVisual newVisual) {
    m_visual = newVisual;
  }

  /**
   * Get the visual being used by this data source.
   *
   */
  public BeanVisual getVisual() {
    return m_visual;
  }

  /**
   * Use the default images for a data source
   *
   */
  public void useDefaultVisual() {
    m_visual.loadIcons(BeanVisual.ICON_PATH+"PredictionAppender.gif",
		       BeanVisual.ICON_PATH+"PredictionAppender_animated.gif");
  }

  protected InstanceEvent m_instanceEvent;
  protected double [] m_instanceVals;

  /**
   * Accept and process an incremental classifier event
   *
   * @param e an <code>IncrementalClassifierEvent</code> value
   */
  public void acceptClassifier(IncrementalClassifierEvent e) {
    weka.classifiers.Classifier classifier = e.getClassifier();
    Instance currentI = e.getCurrentInstance();
    int status = e.getStatus();
    int oldNumAtts = 0;
    if (status == IncrementalClassifierEvent.NEW_BATCH) {
      oldNumAtts = e.getStructure().numAttributes();
    } else {
      oldNumAtts = currentI.dataset().numAttributes();
    }
    if (status == IncrementalClassifierEvent.NEW_BATCH) {
      m_instanceEvent = new InstanceEvent(this, null, 0);
      // create new header structure
      Instances oldStructure = new Instances(e.getStructure(), 0);
      //String relationNameModifier = oldStructure.relationName()
	//+"_with predictions";
      String relationNameModifier = "_with predictions";
	//+"_with predictions";
       if (!m_appendProbabilities 
	   || oldStructure.classAttribute().isNumeric()) {
	 try {
	   m_format = makeDataSetClass(oldStructure, classifier,
						     relationNameModifier);
	   m_instanceVals = new double [m_format.numAttributes()];
	 } catch (Exception ex) {
	   ex.printStackTrace();
	   return;
	 }
       } else if (m_appendProbabilities) {
	 try {
	   m_format = 
	     makeDataSetProbabilities(oldStructure, classifier,
				      relationNameModifier);
	   m_instanceVals = new double [m_format.numAttributes()];
	 } catch (Exception ex) {
	   ex.printStackTrace();
	   return;
	 }
       }
       // Pass on the structure
       m_instanceEvent.setStructure(m_format);
       notifyInstanceAvailable(m_instanceEvent);
       return;
    }

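    // Not a NEW_BATCH (header) event: copy the original attribute values of
    // the current instance and append either the predicted class value or
    // the predicted class distribution, then pass the result on.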
    Instance newInst;
    try {
      // process the actual instance
      for (int i = 0; i < oldNumAtts; i++) {
	m_instanceVals[i] = currentI.value(i);
      }
      if (!m_appendProbabilities 
	  || currentI.dataset().classAttribute().isNumeric()) {
	double predClass = 
	  classifier.classifyInstance(currentI);
	m_instanceVals[m_instanceVals.length - 1] = predClass;
      } else if (m_appendProbabilities) {
	double [] preds = classifier.distributionForInstance(currentI);
	for (int i = oldNumAtts; i < m_instanceVals.length; i++) {
	  m_instanceVals[i] = preds[i-oldNumAtts];
	}      
      }      
    } catch (Exception ex) {
      ex.printStackTrace();
      return;
    } finally {
      newInst = new Instance(currentI.weight(), m_instanceVals);
      newInst.setDataset(m_format);
      m_instanceEvent.setInstance(newInst);
      m_instanceEvent.setStatus(status);
      // notify listeners
      notifyInstanceAvailable(m_instanceEvent);
    }

    if (status == IncrementalClassifierEvent.BATCH_FINISHED) {
      // clean up
      //      m_incrementalStructure = null;
      m_instanceVals = null;
      m_instanceEvent = null;
    }
  }

  /**
   * Accept and process a batch classifier event
   *
   * @param e a <code>BatchClassifierEvent</code> value
   */
  public void acceptClassifier(BatchClassifierEvent e) {
    if (m_dataSourceListeners.size() > 0) {
      Instances testSet = e.getTestSet().getDataSet();

      weka.classifiers.Classifier classifier = e.getClassifier();
      String relationNameModifier = "_set_"+e.getSetNumber()+"_of_"
	+e.getMaxSetNumber();
      if (!m_appendProbabilities || testSet.classAttribute().isNumeric()) {
	try {
	  Instances newInstances = makeDataSetClass(testSet, classifier,
						    relationNameModifier);
	  notifyDataSetAvailable(new DataSetEvent(this, new Instances(newInstances,0)));
          if (e.getTestSet().isStructureOnly()) {
	    m_format = newInstances;
	  }
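          // A header-only copy of the new format is sent first (above); the
          // data set with the predictions filled in follows after the loop
          // below via a second DataSetEvent.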
	  // fill in predicted values
	  for (int i = 0; i < testSet.numInstances(); i++) {
	    double predClass = 
	      classifier.classifyInstance(testSet.instance(i));
	    newInstances.instance(i).setValue(newInstances.numAttributes()-1,
					      predClass);
	  }
	  // notify listeners
	  notifyDataSetAvailable(new DataSetEvent(this, newInstances));
	  return;
	} catch (Exception ex) {
	  ex.printStackTrace();
	}
      }
      if (m_appendProbabilities) {
	try {
	  Instances newInstances = 
