📄 SVMreg.java
      tmpOptions[0] = "";
      setRegOptimizer(
          (RegOptimizer) Utils.forName(RegOptimizer.class, tmpStr, tmpOptions));
    } else {
      setRegOptimizer(new RegSMOImproved());
    }

    tmpStr = Utils.getOption('K', options);
    tmpOptions = Utils.splitOptions(tmpStr);
    if (tmpOptions.length != 0) {
      tmpStr = tmpOptions[0];
      tmpOptions[0] = "";
      setKernel(Kernel.forName(tmpStr, tmpOptions));
    } else {
      setKernel(new PolyKernel());
    }
  }

  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    int i;
    Vector result;
    String[] options;

    result = new Vector();

    options = super.getOptions();
    for (i = 0; i < options.length; i++)
      result.add(options[i]);

    result.add("-C");
    result.add("" + getC());

    result.add("-N");
    result.add("" + m_filterType);

    result.add("-I");
    result.add("" + getRegOptimizer().getClass().getName() + " "
        + Utils.joinOptions(getRegOptimizer().getOptions()));

    result.add("-K");
    result.add("" + getKernel().getClass().getName() + " "
        + Utils.joinOptions(getKernel().getOptions()));

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = getKernel().getCapabilities();
    result.setOwner(this);

    // attributes
    result.enableAllAttributeDependencies();
    // with NominalToBinary we can also handle nominal attributes, but only
    // if the kernel can handle numeric attributes
    if (result.handles(Capability.NUMERIC_ATTRIBUTES))
      result.enable(Capability.NOMINAL_ATTRIBUTES);
    result.enable(Capability.MISSING_VALUES);

    // class
    result.disableAllClasses();
    result.disableAllClassDependencies();
    result.enable(Capability.NUMERIC_CLASS);
    result.enable(Capability.DATE_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);

    return result;
  }

  /**
   * Method for building the classifier.
   *
   * @param instances the set of training instances
   * @throws Exception if the classifier can't be built successfully
   */
  public void buildClassifier(Instances instances) throws Exception {
    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class
    instances = new Instances(instances);
    instances.deleteWithMissingClass();

    // Removes all the instances with weight equal to 0.
    // MUST be done since condition (8) of Keerthi's paper
    // is made with the assertion Ci > 0 (see equation (3a)).
    Instances data = new Instances(instances, 0);
    for (int i = 0; i < instances.numInstances(); i++) {
      if (instances.instance(i).weight() > 0) {
        data.add(instances.instance(i));
      }
    }
    if (data.numInstances() == 0) {
      throw new Exception("No training instances left after removing "
          + "instances with either a zero weight or a missing class!");
    }
    instances = data;

    m_onlyNumeric = true;
    for (int i = 0; i < instances.numAttributes(); i++) {
      if (i != instances.classIndex()) {
        if (!instances.attribute(i).isNumeric()) {
          m_onlyNumeric = false;
          break;
        }
      }
    }

    m_Missing = new ReplaceMissingValues();
    m_Missing.setInputFormat(instances);
    instances = Filter.useFilter(instances, m_Missing);

    if (!m_onlyNumeric) {
      m_NominalToBinary = new NominalToBinary();
      m_NominalToBinary.setInputFormat(instances);
      instances = Filter.useFilter(instances, m_NominalToBinary);
    } else {
      m_NominalToBinary = null;
    }

    // retrieve two different class values used to determine filter transformation
    double y0 = instances.instance(0).classValue();
    int index = 1;
    while (index < instances.numInstances()
        && instances.instance(index).classValue() == y0) {
      index++;
    }
    if (index == instances.numInstances()) {
      // degenerate case, all class values are equal
      // we don't want to deal with this, too much hassle
      throw new Exception(
          "All class values are the same. At least two class values should be different");
    }
    double y1 = instances.instance(index).classValue();

    // apply filters
    if (m_filterType == FILTER_STANDARDIZE) {
      m_Filter = new Standardize();
      ((Standardize) m_Filter).setIgnoreClass(true);
      m_Filter.setInputFormat(instances);
      instances = Filter.useFilter(instances, m_Filter);
    } else if (m_filterType == FILTER_NORMALIZE) {
      m_Filter = new Normalize();
      ((Normalize) m_Filter).setIgnoreClass(true);
      m_Filter.setInputFormat(instances);
      instances = Filter.useFilter(instances, m_Filter);
    } else {
      m_Filter = null;
    }

    if (m_Filter != null) {
      double z0 = instances.instance(0).classValue();
      double z1 = instances.instance(index).classValue();
      m_x1 = (y0 - y1) / (z0 - z1); // no division by zero, since y0 != y1 guaranteed => z0 != z1 ???
      m_x0 = (y0 - m_x1 * z0);      // = y1 - m_x1 * z1
    } else {
      m_x1 = 1.0;
      m_x0 = 0.0;
    }

    m_optimizer.setSVMReg(this);
    m_optimizer.buildClassifier(instances);
  }

  /**
   * Classifies the given instance using the linear regression function.
   *
   * @param instance the test instance
   * @return the classification
   * @throws Exception if classification can't be done successfully
   */
  public double classifyInstance(Instance instance) throws Exception {
    // Filter instance
    m_Missing.input(instance);
    m_Missing.batchFinished();
    instance = m_Missing.output();

    if (!m_onlyNumeric) {
      m_NominalToBinary.input(instance);
      m_NominalToBinary.batchFinished();
      instance = m_NominalToBinary.output();
    }

    if (m_Filter != null) {
      m_Filter.input(instance);
      m_Filter.batchFinished();
      instance = m_Filter.output();
    }

    double result = m_optimizer.SVMOutput(instance);

    return result * m_x1 + m_x0;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String regOptimizerTipText() {
    return "The learning algorithm.";
  }

  /**
   * sets the learning algorithm
   *
   * @param regOptimizer the learning algorithm
   */
  public void setRegOptimizer(RegOptimizer regOptimizer) {
    m_optimizer = regOptimizer;
  }

  /**
   * returns the learning algorithm
   *
   * @return the learning algorithm
   */
  public RegOptimizer getRegOptimizer() {
    return m_optimizer;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String kernelTipText() {
    return "The kernel to use.";
  }

  /**
   * sets the kernel to use
   *
   * @param value the kernel to use
   */
  public void setKernel(Kernel value) {
    m_kernel = value;
  }

  /**
   * Returns the kernel to use
   *
   * @return the current kernel
   */
  public Kernel getKernel() {
    return m_kernel;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String cTipText() {
    return "The complexity parameter C.";
  }

  /**
   * Get the value of C.
   *
   * @return Value of C.
   */
  public double getC() {
    return m_C;
  }

  /**
   * Set the value of C.
   *
   * @param v Value to assign to C.
   */
  public void setC(double v) {
    m_C = v;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String filterTypeTipText() {
    return "Determines how/if the data will be transformed.";
  }

  /**
   * Gets how the training data will be transformed. Will be one of
   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
   *
   * @return the filtering mode
   */
  public SelectedTag getFilterType() {
    return new SelectedTag(m_filterType, TAGS_FILTER);
  }

  /**
   * Sets how the training data will be transformed. Should be one of
   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
   *
   * @param newType the new filtering mode
   */
  public void setFilterType(SelectedTag newType) {
    if (newType.getTags() == TAGS_FILTER) {
      m_filterType = newType.getSelectedTag().getID();
    }
  }

  /**
   * Prints out the classifier.
   *
   * @return a description of the classifier as a string
   */
  public String toString() {
    StringBuffer text = new StringBuffer();

    if (m_optimizer == null || !m_optimizer.modelBuilt()) {
      return "SVMreg: No model built yet.";
    }

    try {
      text.append(m_optimizer.toString());
    } catch (Exception e) {
      return "Can't print SVMreg classifier.";
    }

    return text.toString();
  }

  /**
   * Returns an enumeration of the measure names. Additional measures
   * must follow the naming convention of starting with "measure", e.g.
   * double measureBlah()
   *
   * @return an enumeration of the measure names
   */
  public Enumeration enumerateMeasures() {
    Vector result = new Vector();

    result.addElement("measureKernelEvaluations");
    result.addElement("measureCacheHits");

    return result.elements();
  }

  /**
   * Returns the value of the named measure
   *
   * @param measureName the name of the measure to query for its value
   * @return the value of the named measure
   * @throws IllegalArgumentException if the named measure is not supported
   */
  public double getMeasure(String measureName) {
    if (measureName.equals("measureKernelEvaluations"))
      return measureKernelEvaluations();
    else if (measureName.equals("measureCacheHits"))
      return measureCacheHits();
    else
      throw new IllegalArgumentException("Measure '" + measureName + "' is not supported!");
  }

  /**
   * number of kernel evaluations used in learning
   *
   * @return the number of kernel evaluations
   */
  protected double measureKernelEvaluations() {
    if (m_optimizer != null) {
      return m_optimizer.getKernelEvaluations();
    } else {
      return 0;
    }
  }

  /**
   * number of kernel cache hits used during learning
   *
   * @return the number of kernel cache hits
   */
  protected double measureCacheHits() {
    if (m_optimizer != null) {
      return m_optimizer.getCacheHits();
    } else {
      return 0;
    }
  }

  /**
   * Main method for running this classifier.
   *
   * @param args the command-line options
   */
  public static void main(String[] args) {
    runClassifier(new SVMreg(), args);
  }
}
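For reference, here is a minimal usage sketch showing how the classifier above might be trained and applied programmatically. It assumes a standard Weka 3.x classpath; the wrapper class SVMregDemo and the dataset path housing.arff are hypothetical, and the -C / -K option values are illustrative only, mirroring the option format produced by getOptions().

// Minimal usage sketch. Assumptions: Weka 3.x on the classpath; "housing.arff"
// is a hypothetical ARFF file whose last attribute is the numeric target.
import weka.classifiers.functions.SVMreg;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.converters.ConverterUtils.DataSource;

public class SVMregDemo {
  public static void main(String[] args) throws Exception {
    // load the data and mark the last attribute as the (numeric) class
    Instances data = DataSource.read("housing.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // configure via the same option strings parsed in setOptions():
    // -C complexity constant, -K kernel class plus its own options
    SVMreg svm = new SVMreg();
    svm.setOptions(Utils.splitOptions(
        "-C 1.0 -K \"weka.classifiers.functions.supportVector.PolyKernel -E 1.0\""));

    // buildClassifier() applies the missing-value / nominal-to-binary / scaling
    // filters shown above, then hands the filtered data to the RegOptimizer
    svm.buildClassifier(data);

    // classifyInstance() pushes the instance through the same filters and
    // rescales the optimizer output with m_x1 and m_x0
    Instance first = data.instance(0);
    System.out.println("predicted: " + svm.classifyInstance(first)
        + "  actual: " + first.classValue());
  }
}

The classifier can also be run from the command line through the main() method above, e.g. java weka.classifiers.functions.SVMreg -t housing.arff -C 1.0, where -t is Weka's standard training-file flag.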