ConjunctiveRule.java
"\t(default:use randomization)","R", 0, "-R")); newVector.addElement(new Option("\tSet whether consider the exclusive\n" + "\texpressions for nominal attributes\n"+ "\t(default false)","E", 0, "-E")); newVector.addElement(new Option("\tSet the minimal weights of instances\n" + "\twithin a split.\n" + "\t(default 2.0)","M", 1, "-M <min. weights>")); newVector.addElement(new Option("\tSet number of antecedents for pre-pruning\n" + "\tif -1, then REP is used\n" + "\t(default -1)","P", 1, "-P <number of antecedents>")); newVector.addElement(new Option("\tSet the seed of randomization\n" + "\t(default 1)","S", 1, "-S <seed>")); return newVector.elements(); } /** * Parses a given list of options. <p/> * <!-- options-start --> * Valid options are: <p/> * * <pre> -N <number of folds> * Set number of folds for REP * One fold is used as pruning set. * (default 3)</pre> * * <pre> -R * Set if NOT uses randomization * (default:use randomization)</pre> * * <pre> -E * Set whether consider the exclusive * expressions for nominal attributes * (default false)</pre> * * <pre> -M <min. weights> * Set the minimal weights of instances * within a split. * (default 2.0)</pre> * * <pre> -P <number of antecedents> * Set number of antecedents for pre-pruning * if -1, then REP is used * (default -1)</pre> * * <pre> -S <seed> * Set the seed of randomization * (default 1)</pre> * <!-- options-end --> * * @param options the list of options as an array of strings * @throws Exception if an option is not supported */ public void setOptions(String[] options) throws Exception { String numFoldsString = Utils.getOption('N', options); if (numFoldsString.length() != 0) m_Folds = Integer.parseInt(numFoldsString); else m_Folds = 3; String minNoString = Utils.getOption('M', options); if (minNoString.length() != 0) m_MinNo = Double.parseDouble(minNoString); else m_MinNo = 2.0; String seedString = Utils.getOption('S', options); if (seedString.length() != 0) m_Seed = Integer.parseInt(seedString); else m_Seed = 1; String numAntdsString = Utils.getOption('P', options); if (numAntdsString.length() != 0) m_NumAntds = Integer.parseInt(numAntdsString); else m_NumAntds = -1; m_IsExclude = Utils.getFlag('E', options); } /** * Gets the current settings of the Classifier. * * @return an array of strings suitable for passing to setOptions */ public String [] getOptions() { String [] options = new String [9]; int current = 0; options[current++] = "-N"; options[current++] = "" + m_Folds; options[current++] = "-M"; options[current++] = "" + m_MinNo; options[current++] = "-P"; options[current++] = "" + m_NumAntds; options[current++] = "-S"; options[current++] = "" + m_Seed; if(m_IsExclude) options[current++] = "-E"; while (current < options.length) options[current++] = ""; return options; } /** The access functions for parameters */ /** * Returns the tip text for this property * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String foldsTipText() { return "Determines the amount of data used for pruning. 
One fold is used for " + "pruning, the rest for growing the rules."; } /** * the number of folds to use * * @param folds the number of folds to use */ public void setFolds(int folds) { m_Folds = folds; } /** * returns the current number of folds * * @return the number of folds */ public int getFolds() { return m_Folds; } /** * Returns the tip text for this property * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String seedTipText() { return "The seed used for randomizing the data."; } /** * sets the seed for randomizing the data * * @param s the seed value */ public void setSeed(long s) { m_Seed = s; } /** * returns the current seed value for randomizing the data * * @return the seed value */ public long getSeed() { return m_Seed; } /** * Returns the tip text for this property * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String exclusiveTipText() { return "Set whether to consider exclusive expressions for nominal " + "attribute splits."; } /** * Returns whether exclusive expressions for nominal attributes splits are * considered * * @return true if exclusive expressions for nominal attributes splits are * considered */ public boolean getExclusive() { return m_IsExclude; } /** * Sets whether exclusive expressions for nominal attributes splits are * considered * * @param e whether to consider exclusive expressions for nominal attribute * splits */ public void setExclusive(boolean e) { m_IsExclude = e; } /** * Returns the tip text for this property * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String minNoTipText() { return "The minimum total weight of the instances in a rule."; } /** * Sets the minimum total weight of the instances in a rule * * @param m the minimum total weight of the instances in a rule */ public void setMinNo(double m) { m_MinNo = m; } /** * Gets the minimum total weight of the instances in a rule * * @return the minimum total weight of the instances in a rule */ public double getMinNo(){ return m_MinNo; } /** * Returns the tip text for this property * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String numAntdsTipText() { return "Set the number of antecedents allowed in the rule if " + "pre-pruning is used. If this value is other than -1, then " + "pre-pruning will be used, otherwise the rule uses reduced-error " + "pruning."; } /** * Sets the number of antecedants * * @param n the number of antecedants */ public void setNumAntds(int n) { m_NumAntds = n; } /** * Gets the number of antecedants * * @return the number of antecedants */ public int getNumAntds(){ return m_NumAntds; } /** * Returns default capabilities of the classifier. * * @return the capabilities of this classifier */ public Capabilities getCapabilities() { Capabilities result = super.getCapabilities(); // attributes result.enable(Capability.NOMINAL_ATTRIBUTES); result.enable(Capability.NUMERIC_ATTRIBUTES); result.enable(Capability.DATE_ATTRIBUTES); result.enable(Capability.MISSING_VALUES); // class result.enable(Capability.NOMINAL_CLASS); result.enable(Capability.NUMERIC_CLASS); result.enable(Capability.DATE_CLASS); result.enable(Capability.MISSING_CLASS_VALUES); return result; } /** * Builds a single rule learner with REP dealing with nominal classes or * numeric classes. * For nominal classes, this rule learner predicts a distribution on * the classes. 
* For numeric classes, this learner predicts a single value. * * @param instances the training data * @throws Exception if classifier can't be built successfully */ public void buildClassifier(Instances instances) throws Exception { // can classifier handle the data? getCapabilities().testWithFail(instances); // remove instances with missing class Instances data = new Instances(instances); data.deleteWithMissingClass(); if(data.numInstances() < m_Folds) throw new Exception("Not enough data for REP."); m_ClassAttribute = data.classAttribute(); if(m_ClassAttribute.isNominal()) m_NumClasses = m_ClassAttribute.numValues(); else m_NumClasses = 1; m_Antds = new FastVector(); m_DefDstr = new double[m_NumClasses]; m_Cnsqt = new double[m_NumClasses]; m_Targets = new FastVector(); m_Random = new Random(m_Seed); if(m_NumAntds != -1){ grow(data); } else{ data.randomize(m_Random); // Split data into Grow and Prune data.stratify(m_Folds); Instances growData=data.trainCV(m_Folds, m_Folds-1, m_Random); Instances pruneData=data.testCV(m_Folds, m_Folds-1); grow(growData); // Build this rule prune(pruneData); // Prune this rule } if(m_ClassAttribute.isNominal()){ Utils.normalize(m_Cnsqt); if(Utils.gr(Utils.sum(m_DefDstr), 0)) Utils.normalize(m_DefDstr); } } /** * Computes class distribution for the given instance. * * @param instance the instance for which distribution is to be computed * @return the class distribution for the given instance * @throws Exception if given instance is null */ public double[] distributionForInstance(Instance instance) throws Exception { if(instance == null) throw new Exception("Testing instance is NULL!"); if (isCover(instance)) return m_Cnsqt; else return m_DefDstr; } /** * Whether the instance covered by this rule * * @param datum the instance in question * @return the boolean value indicating whether the instance is covered by this rule */ public boolean isCover(Instance datum){ boolean isCover=true; for(int i=0; i<m_Antds.size(); i++){ Antd antd = (Antd)m_Antds.elementAt(i); if(!antd.isCover(datum)){ isCover = false; break; } } return isCover; } /** * Whether this rule has antecedents, i.e. whether it is a default rule * * @return the boolean value indicating whether the rule has antecedents */ public boolean hasAntds(){ if (m_Antds == null) return false; else return (m_Antds.size() > 0); } /** * Build one rule using the growing data * * @param data the growing data used to build the rule */ private void grow(Instances data){ Instances growData = new Instances(data); double defInfo; double whole = data.sumOfWeights(); if(m_NumAntds != 0){ /* Class distribution for data both covered and not covered by one antecedent */ double[][] classDstr = new double[2][m_NumClasses]; /* Compute the default information of the growing data */ for(int j=0; j < m_NumClasses; j++){ classDstr[0][j] = 0; classDstr[1][j] = 0; } if(m_ClassAttribute.isNominal()){ for(int i=0; i < growData.numInstances(); i++){ Instance datum = growData.instance(i); classDstr[0][(int)datum.classValue()] += datum.weight(); } defInfo = ContingencyTables.entropy(classDstr[0]); } else{
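
/*
 * ---------------------------------------------------------------------------
 * Usage sketch (not part of the original source): a minimal example of how
 * this learner might be configured and run through the methods shown above
 * (setOptions, buildClassifier, distributionForInstance), assuming this file
 * is Weka's weka.classifiers.rules.ConjunctiveRule. The data file
 * "weather.arff" and the class name ConjunctiveRuleDemo are illustrative
 * assumptions only.
 * ---------------------------------------------------------------------------
 */
import weka.classifiers.rules.ConjunctiveRule;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ConjunctiveRuleDemo {
  public static void main(String[] args) throws Exception {
    // Load a dataset and use the last attribute as the class (path is assumed).
    Instances data = DataSource.read("weather.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // Configure the learner with the options parsed by setOptions():
    // 3 REP folds (-N), minimum instance weight 2.0 (-M), seed 1 (-S), and
    // -P -1 so that reduced-error pruning is used instead of pre-pruning.
    ConjunctiveRule rule = new ConjunctiveRule();
    rule.setOptions(new String[] {"-N", "3", "-M", "2.0", "-S", "1", "-P", "-1"});

    // Grow and prune the single rule, then query the class distribution the
    // rule (or its default distribution) assigns to one instance.
    rule.buildClassifier(data);
    Instance first = data.instance(0);
    double[] dist = rule.distributionForInstance(first);
    for (int i = 0; i < dist.length; i++) {
      System.out.println(data.classAttribute().value(i) + ": " + dist[i]);
    }
  }
}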