
📄 DataSetAttributesOperator.java

📁 Java source code for the complete workflow of the data mining software ALPHAMINER
💻 JAVA
📖 Page 1 / 2
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * $Author$
 * $Date$
 * $Revision$
 * 
 */
package eti.bi.alphaminer.patch.standard.operation.operator;


import java.util.Hashtable;
import java.util.Vector;


import com.prudsys.pdm.Core.CategoricalAttribute;
import com.prudsys.pdm.Core.MiningAttribute;
import com.prudsys.pdm.Core.MiningDataSpecification;
import com.prudsys.pdm.Core.MiningException;
import com.prudsys.pdm.Core.NumericAttribute;
import com.prudsys.pdm.Input.MiningStoredData;
import com.prudsys.pdm.Transform.MiningTransformationFactory;
import com.prudsys.pdm.Transform.MiningTransformationStep;
import com.prudsys.pdm.Transform.MultipleToMultiple.RemoveAttributes;
import com.prudsys.pdm.Transform.OneToOne.Categorization;
import com.prudsys.pdm.Transform.OneToOne.Numerization;
import com.prudsys.pdm.Transform.OneToOne.TransTypeCateg;
import com.prudsys.pdm.Transform.OneToOne.TransTypeNum;
import com.prudsys.pdm.Transform.Special.CopyMiningStream;

import eti.bi.alphaminer.core.handler.ICaseHandler;
import eti.bi.alphaminer.core.transform.XelopesTransformAction;
import eti.bi.alphaminer.operation.operator.INodeInfo;
import eti.bi.alphaminer.operation.operator.Operator;
import eti.bi.alphaminer.operation.operator.TransformOperator;
import eti.bi.alphaminer.patch.standard.operation.property.DataSetAttributesOperatorProperty;
import eti.bi.alphaminer.vo.BIData;
import eti.bi.alphaminer.vo.BIObject;
import eti.bi.alphaminer.vo.IBIData;
import eti.bi.alphaminer.vo.IOperatorNode;
import eti.bi.exception.AppException;
import eti.bi.exception.SysException;

/**
 * DataSetAttributesOperator is a TransformOperator (a kind of Operator) that applies
 * the attribute settings chosen for a data set: it removes attributes flagged as not
 * used and converts attribute types between numeric and categorical.
 */

public class DataSetAttributesOperator extends TransformOperator { 
	
	private static final long serialVersionUID = 1L;


	/**
	 * Creates the operator for the given case and node.
	 * @param a_CaseID ID of the case this operator belongs to
	 * @param aNodeInfo information about the node hosting this operator
	 * @param aCaseHandler handler of the owning case
	 */
	public DataSetAttributesOperator(String a_CaseID, INodeInfo aNodeInfo, ICaseHandler aCaseHandler) {
		super(a_CaseID, aNodeInfo, aCaseHandler);
	}
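
	// Usage sketch (illustrative only; the IDs, nodeInfo and caseHandler values are
	// hypothetical and would normally be supplied by the hosting case framework):
	//   DataSetAttributesOperator op =
	//       new DataSetAttributesOperator("case-01", nodeInfo, caseHandler);
	//   op.setNodeID("node-07");   // setNodeID below relabels the operator to "<description> [node-07]"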


	private static final String [] CATEGORICAL_DATA_TYPE = {DataSetAttributesOperatorProperty.STRING};
	private static final int [] CATEGORICAL_DATA_TYPE_VAL = {CategoricalAttribute.STRING};
	private static final String [] NUMERIC_DATA_TYPE = {DataSetAttributesOperatorProperty.DOUBLE};
	private static final int [] NUMERIC_DATA_TYPE_VAL = {NumericAttribute.DOUBLE};
	
	private Hashtable m_Parameters = null;
	private String[] m_AttrNames = null;
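
	// Note (inferred from the usage in prepareRemoveAttributes and
	// prepareTypeTransformation below, not from original documentation):
	// the *_DATA_TYPE / *_DATA_TYPE_VAL pairs are parallel arrays mapping the type
	// names offered by DataSetAttributesOperatorProperty to Xelopes data-type codes,
	// and m_Parameters maps an attribute name to a Vector whose element 0 holds the
	// requested attribute type (CATEGORICAL/NUMERIC) and element 2 the usage flag
	// (NOT_USE marks the attribute for removal).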

	
	/**
	 * Set node id and update operator text of the Set Attributes at the same time.
	 * @param a_NodeID ID of the node
	 */
	public void setNodeID(String a_NodeID) {
		setLabel(getDescription() + " [" + a_NodeID + "]");
		super.setNodeID(a_NodeID);
	}
	
	/**
	 * Set description and update the operator label of the Set Attributes node at the same time.
	 * @param a_Description new description of the operator
	 */
	public void setDescription(String a_Description) {
		m_Description = a_Description;
		setLabel(m_Description + " [" + m_NodeID + "]");		
	}
	
	/**
	 * Builds a RemoveAttributes transformation that drops every attribute whose
	 * usage flag in the operator parameters is set to NOT_USE.
	 * @param a_MetaData meta data of the incoming mining stream
	 * @param a_Node operator node carrying the user settings
	 * @return the configured RemoveAttributes transformation
	 */
	@SuppressWarnings("unchecked")
	public RemoveAttributes prepareRemoveAttributes(MiningDataSpecification a_MetaData, IOperatorNode a_Node)
	{
		RemoveAttributes removeAttributes = new RemoveAttributes();
		MiningAttribute[] attr = a_MetaData.getAttributesArray();
		String[] sourceAttr = new String[attr.length];
		Vector remove = new Vector();
		Vector param;
		String value = null;
		for (int i=0; i<attr.length; i++)
		{
			sourceAttr[i] = attr[i].getName();			
			param = (Vector)m_Parameters.get(sourceAttr[i]);
			
			value = (String)param.get(2);
			if(value.equalsIgnoreCase(DataSetAttributesOperatorProperty.NOT_USE))
			    remove.addElement(attr[i].getName());
		
		}
		removeAttributes.setSourceName(sourceAttr);
		removeAttributes.setRemoveAttributeNames(remove);
		return removeAttributes;
	}

		
	/**
	 * Adds a Categorization or Numerization step for every used attribute whose
	 * requested type (categorical or numeric) differs from its current type.
	 * @param a_MetaData meta data of the incoming mining stream
	 * @param a_Node operator node carrying the user settings
	 * @param a_mtf transformation factory that the conversion steps are added to
	 * @return a_mtf if at least one conversion step was added, otherwise null
	 */
	public MiningTransformationFactory prepareTypeTransformation(MiningDataSpecification a_MetaData, IOperatorNode a_Node, MiningTransformationFactory a_mtf){
	    MiningAttribute[] attr = a_MetaData.getAttributesArray();
		MiningAttribute attribute;
		String name;
		Vector param;
		String value = null;
		int step = 0;
		
		for (int i=0; i<attr.length; i++)
		{
		    attribute = attr[i];
		    name = attr[i].getName();
		    param = (Vector)m_Parameters.get(name);
			
		    value = (String)param.get(2);
		    if(!value.equalsIgnoreCase(DataSetAttributesOperatorProperty.NOT_USE)){
		        if(attribute instanceof NumericAttribute){
		            value = (String)param.get(0);
		            if(value.equalsIgnoreCase(DataSetAttributesOperatorProperty.CATEGORICAL)){
		                Categorization categ = preparecCategorization(name);
		                a_mtf.addOneToOneMapping(categ);
		                step++;
		            }
		        }else  if(attribute instanceof CategoricalAttribute){
		            value = (String)param.get(0);
		            if(value.equalsIgnoreCase(DataSetAttributesOperatorProperty.NUMERIC)){
		                Numerization numerization = preparecNumerization(name);
		                a_mtf.addOneToOneMapping(numerization);
		                step++;
		            }
		        }
		    }
			
		}
		if(step >0)
		    return a_mtf;
		else 
		    return null;
	}
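
	// Caller-side sketch (illustrative; metaData, node and mtf are hypothetical
	// variables, not part of this file):
	//   MiningTransformationFactory result = prepareTypeTransformation(metaData, node, mtf);
	//   if (result == null) {
	//       // no type conversion was requested, so no conversion step needs to be applied
	//   }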
	
	/*
	public MiningTransformationFactory prepareDataTypeTransformation(MiningDataSpecification a_MetaData, OperatorNode a_Node, MiningTransformationFactory a_mtf){
	    MiningAttribute[] attr = a_MetaData.getAttributesArray();
		MiningAttribute attribute;
		String name;
		String [] param;
		String value = null;
		for (int i=0; i<2; i++)
		{
		    attribute = attr[i];
		    name = attr[i].getName();
		    param = new String[3];
			
		    value = a_Node.getParameterValue(name);
			if (value!=null){
			    param = value.split(",");
			    if(!param[2].equalsIgnoreCase(NOT_USE)){
			        if(param[0].equalsIgnoreCase(CATEGORICAL)){
        	            if(Integer.parseInt(param[1])!=attribute.getDataType()){
        	                TransTypeCateg trans = prepareTransTypeCateg(name, Integer.parseInt(param[1]));
        	                a_mtf.addOneToOneMapping(trans);
        	            }
			        }else if(param[0].equalsIgnoreCase(NUMERIC)){
		                if(Integer.parseInt(param[1])!=attribute.getDataType()){
        	                TransTypeNum trans = prepareTransTypeNum(name, Integer.parseInt(param[1]));
        	                a_mtf.addOneToOneMapping(trans);
		                }
		            }
			        
			    }
			}
		}
	    return a_mtf;
	}
	*/
	

	/**
	 * Creates a TransTypeCateg mapping that changes the data type of a categorical attribute.
	 * @param a_Target name of the attribute to re-type
	 * @param a_TargetDataType target categorical data type code
	 * @return the configured mapping
	 */
	public TransTypeCateg prepareTransTypeCateg(String a_Target, int a_TargetDataType){
	    TransTypeCateg trans = new TransTypeCateg();
	    trans.setSourceName(a_Target);
	    trans.setTargetDataType(a_TargetDataType);
	    return trans;
	}
	
	/**
	 * Creates a TransTypeNum mapping that changes the data type of a numeric attribute.
	 * @param a_Target name of the attribute to re-type
	 * @param a_TargetDataType target numeric data type code
	 * @return the configured mapping
	 */
	public TransTypeNum prepareTransTypeNum(String a_Target, int a_TargetDataType){
	    TransTypeNum trans = new TransTypeNum();
	    trans.setSourceName(a_Target);
	    trans.setTargetDataType(a_TargetDataType);
	    return trans;
	}
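
	// Example (hypothetical attribute name "age"): request that the attribute be
	// re-typed to a string-valued categorical attribute and register the mapping
	// on a transformation factory, as done for Categorization/Numerization above:
	//   TransTypeCateg trans = prepareTransTypeCateg("age", CategoricalAttribute.STRING);
	//   a_mtf.addOneToOneMapping(trans);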
	
	/**
	 * Creates a Categorization mapping that converts the given numeric attribute to a categorical one.
	 * @param a_Target name of the attribute to categorize
	 */
	public Categorization preparecCategorization(String a_Target)
	{
	    Categorization mapping = new Categorization();
		
		mapping.setSourceName(a_Target);
