
smo.java

wekaUT is a collection of semi-supervised learning classifiers built on Weka, developed by the University of Texas at Austin.
Language: Java
Page 1 of 4
    return vec;
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(13);

    newVector.addElement(new Option("\tThe complexity constant C. (default 1)",
                                    "C", 1, "-C <double>"));
    newVector.addElement(new Option("\tThe exponent for the "
                                    + "polynomial kernel. (default 1)",
                                    "E", 1, "-E <double>"));
    newVector.addElement(new Option("\tGamma for the "
                                    + "RBF kernel. (default 0.01)",
                                    "G", 1, "-G <double>"));
    newVector.addElement(new Option("\tWhether to 0=normalize/1=standardize/2=neither. " +
                                    "(default 0=normalize)",
                                    "N", 1, "-N"));
    newVector.addElement(new Option("\tFeature-space normalization (only for non-linear polynomial kernels).",
                                    "F", 0, "-F"));
    newVector.addElement(new Option("\tUse lower-order terms (only for non-linear polynomial kernels).",
                                    "O", 0, "-O"));
    newVector.addElement(new Option("\tUse RBF kernel. " +
                                    "(default poly)",
                                    "R", 0, "-R"));
    newVector.addElement(new Option("\tThe size of the kernel cache. " +
                                    "(default 1000003)",
                                    "A", 1, "-A <int>"));
    newVector.addElement(new Option("\tThe tolerance parameter. " +
                                    "(default 1.0e-3)",
                                    "T", 1, "-T <double>"));
    newVector.addElement(new Option("\tThe epsilon for round-off error. " +
                                    "(default 1.0e-12)",
                                    "P", 1, "-P <double>"));
    newVector.addElement(new Option("\tFit logistic models to SVM outputs. ",
                                    "M", 0, "-M"));
    newVector.addElement(new Option("\tThe number of folds for the internal cross-validation. " +
                                    "(default -1, use training data)",
                                    "V", 1, "-V <double>"));
    newVector.addElement(new Option("\tThe random number seed for the internal cross-validation. " +
                                    "(default 1)",
                                    "W", 1, "-W <double>"));

    return newVector.elements();
  }

  /**
   * Parses a given list of options. Valid options are:<p>
   *
   * -C num <br>
   * The complexity constant C. (default 1)<p>
   *
   * -E num <br>
   * The exponent for the polynomial kernel. (default 1) <p>
   *
   * -G num <br>
   * Gamma for the RBF kernel. (default 0.01) <p>
   *
   * -N <0|1|2> <br>
   * Whether to 0=normalize/1=standardize/2=neither. (default 0=normalize)<p>
   *
   * -F <br>
   * Feature-space normalization (only for non-linear polynomial kernels). <p>
   *
   * -O <br>
   * Use lower-order terms (only for non-linear polynomial kernels). <p>
   *
   * -R <br>
   * Use RBF kernel (default poly). <p>
   *
   * -A num <br>
   * Sets the size of the kernel cache. Should be a prime number. (default 1000003) <p>
   *
   * -T num <br>
   * Sets the tolerance parameter. (default 1.0e-3)<p>
   *
   * -P num <br>
   * Sets the epsilon for round-off error. (default 1.0e-12)<p>
   *
   * -M <br>
   * Fit logistic models to SVM outputs.<p>
   *
   * -V num <br>
   * Number of runs for cross-validation used to generate data
   * for logistic models. (default -1, use training data)
   *
   * -W num <br>
   * Random number seed for cross-validation. (default 1)
   *
   * @param options the list of options as an array of strings
   * @exception Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    String complexityString = Utils.getOption('C', options);
    if (complexityString.length() != 0) {
      m_C = (new Double(complexityString)).doubleValue();
    } else {
      m_C = 1.0;
    }
    String exponentsString = Utils.getOption('E', options);
    if (exponentsString.length() != 0) {
      m_exponent = (new Double(exponentsString)).doubleValue();
    } else {
      m_exponent = 1.0;
    }
    String gammaString = Utils.getOption('G', options);
    if (gammaString.length() != 0) {
      m_gamma = (new Double(gammaString)).doubleValue();
    } else {
      m_gamma = 0.01;
    }
    String cacheString = Utils.getOption('A', options);
    if (cacheString.length() != 0) {
      m_cacheSize = Integer.parseInt(cacheString);
    } else {
      m_cacheSize = 1000003;
    }
    String toleranceString = Utils.getOption('T', options);
    if (toleranceString.length() != 0) {
      m_tol = (new Double(toleranceString)).doubleValue();
    } else {
      m_tol = 1.0e-3;
    }
    String epsilonString = Utils.getOption('P', options);
    if (epsilonString.length() != 0) {
      m_eps = (new Double(epsilonString)).doubleValue();
    } else {
      m_eps = 1.0e-12;
    }
    m_useRBF = Utils.getFlag('R', options);
    String nString = Utils.getOption('N', options);
    if (nString.length() != 0) {
      setFilterType(new SelectedTag(Integer.parseInt(nString), TAGS_FILTER));
    } else {
      setFilterType(new SelectedTag(FILTER_NORMALIZE, TAGS_FILTER));
    }
    m_featureSpaceNormalization = Utils.getFlag('F', options);
    if ((m_useRBF) && (m_featureSpaceNormalization)) {
      throw new Exception("RBF machine doesn't require feature-space normalization.");
    }
    if ((m_exponent == 1.0) && (m_featureSpaceNormalization)) {
      throw new Exception("Can't use feature-space normalization with linear machine.");
    }
    m_lowerOrder = Utils.getFlag('O', options);
    if ((m_useRBF) && (m_lowerOrder)) {
      throw new Exception("Can't use lower-order terms with RBF machine.");
    }
    if ((m_exponent == 1.0) && (m_lowerOrder)) {
      throw new Exception("Can't use lower-order terms with linear machine.");
    }
    m_fitLogisticModels = Utils.getFlag('M', options);
    String foldsString = Utils.getOption('V', options);
    if (foldsString.length() != 0) {
      m_numFolds = Integer.parseInt(foldsString);
    } else {
      m_numFolds = -1;
    }
    String randomSeedString = Utils.getOption('W', options);
    if (randomSeedString.length() != 0) {
      m_randomSeed = Integer.parseInt(randomSeedString);
    } else {
      m_randomSeed = 1;
    }
  }

  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    String [] options = new String [21];
    int current = 0;

    options[current++] = "-C"; options[current++] = "" + m_C;
    options[current++] = "-E"; options[current++] = "" + m_exponent;
    options[current++] = "-G"; options[current++] = "" + m_gamma;
    options[current++] = "-A"; options[current++] = "" + m_cacheSize;
    options[current++] = "-T"; options[current++] = "" + m_tol;
    options[current++] = "-P"; options[current++] = "" + m_eps;
    options[current++] = "-N"; options[current++] = "" + m_filterType;
    if (m_featureSpaceNormalization) {
      options[current++] = "-F";
    }
    if (m_lowerOrder) {
      options[current++] = "-O";
    }
    if (m_useRBF) {
      options[current++] = "-R";
    }
    if (m_fitLogisticModels) {
      options[current++] = "-M";
    }
    options[current++] = "-V"; options[current++] = "" + m_numFolds;
    options[current++] = "-W"; options[current++] = "" + m_randomSeed;

    while (current < options.length) {
      options[current++] = "";
    }
    return options;
  }

  /**
   * Get the value of exponent.
   *
   * @return Value of exponent.
   */
  public double getExponent() {

    return m_exponent;
  }

  /**
   * Set the value of exponent. If linear kernel
   * is used, rescaling and lower-order terms are
   * turned off.
   *
   * @param v  Value to assign to exponent.
   */
  public void setExponent(double v) {

    if (v == 1.0) {
      m_featureSpaceNormalization = false;
      m_lowerOrder = false;
    }
    m_exponent = v;
  }

  /**
   * Get the value of gamma.
   *
   * @return Value of gamma.
   */
  public double getGamma() {

    return m_gamma;
  }

  /**
   * Set the value of gamma.
   *
   * @param v  Value to assign to gamma.
   */
  public void setGamma(double v) {

    m_gamma = v;
  }

  /**
   * Get the value of C.
   *
   * @return Value of C.
   */
  public double getC() {

    return m_C;
  }

  /**
   * Set the value of C.
   *
   * @param v  Value to assign to C.
   */
  public void setC(double v) {

    m_C = v;
  }

  /**
   * Get the value of tolerance parameter.
   * @return Value of tolerance parameter.
   */
  public double getToleranceParameter() {

    return m_tol;
  }

  /**
   * Set the value of tolerance parameter.
   * @param v  Value to assign to tolerance parameter.
   */
  public void setToleranceParameter(double v) {

    m_tol = v;
  }

  /**
   * Get the value of epsilon.
   * @return Value of epsilon.
   */
  public double getEpsilon() {

    return m_eps;
  }

  /**
   * Set the value of epsilon.
   * @param v  Value to assign to epsilon.
   */
  public void setEpsilon(double v) {

    m_eps = v;
  }

  /**
   * Get the size of the kernel cache.
   * @return Size of kernel cache.
   */
  public int getCacheSize() {

    return m_cacheSize;
  }

  /**
   * Set the value of the kernel cache.
   * @param v  Size of kernel cache.
   */
  public void setCacheSize(int v) {

    m_cacheSize = v;
  }

  /**
   * Gets how the training data will be transformed. Will be one of
   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
   *
   * @return the filtering mode
   */
  public SelectedTag getFilterType() {

    return new SelectedTag(m_filterType, TAGS_FILTER);
  }

  /**
   * Sets how the training data will be transformed. Should be one of
   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
   *
   * @param newType the new filtering mode
   */
  public void setFilterType(SelectedTag newType) {

    if (newType.getTags() == TAGS_FILTER) {
      m_filterType = newType.getSelectedTag().getID();
    }
  }

  /**
   * Check if the RBF kernel is to be used.
   * @return true if RBF
   */
  public boolean getUseRBF() {

    return m_useRBF;
  }

  /**
   * Set if the RBF kernel is to be used.
   * @param v  true if RBF
   */
  public void setUseRBF(boolean v) {

    if (v) {
      m_featureSpaceNormalization = false;
      m_lowerOrder = false;
    }
    m_useRBF = v;
  }

  /**
   * Check whether feature space is being normalized.
   * @return true if feature space is normalized.
   */
  public boolean getFeatureSpaceNormalization() throws Exception {

    return m_featureSpaceNormalization;
  }

  /**
   * Set whether feature space is normalized.
   * @param v  true if feature space is to be normalized.
   */
  public void setFeatureSpaceNormalization(boolean v) throws Exception {

    if ((m_useRBF) || (m_exponent == 1.0)) {
      m_featureSpaceNormalization = false;
    } else {
      m_featureSpaceNormalization = v;
    }
  }

  /**
   * Check whether lower-order terms are being used.
   * @return Value of lowerOrder.
   */
  public boolean getLowerOrderTerms() {

    return m_lowerOrder;
  }

  /**
   * Set whether lower-order terms are to be used. Defaults
   * to false if a linear machine is built.
   * @param v  Value to assign to lowerOrder.
   */
  public void setLowerOrderTerms(boolean v) {

    if (m_exponent == 1.0 || m_useRBF) {
      m_lowerOrder = false;
    } else {
      m_lowerOrder = v;
    }
  }

  /**
   * Get the value of buildLogisticModels.
   *
   * @return Value of buildLogisticModels.
   */
  public boolean getBuildLogisticModels() {

    return m_fitLogisticModels;
  }

  /**
   * Set the value of buildLogisticModels.
   *
   * @param newbuildLogisticModels Value to assign to buildLogisticModels.
   */
  public void setBuildLogisticModels(boolean newbuildLogisticModels) {

    m_fitLogisticModels = newbuildLogisticModels;
  }

  /**
   * Get the value of numFolds.
   *
   * @return Value of numFolds.
   */
  public int getNumFolds() {

    return m_numFolds;
  }

  /**
   * Set the value of numFolds.
   *
   * @param newnumFolds Value to assign to numFolds.
   */
  public void setNumFolds(int newnumFolds) {

    m_numFolds = newnumFolds;
  }

  /**
   * Get the value of randomSeed.
   *
   * @return Value of randomSeed.
   */
  public int getRandomSeed() {

    return m_randomSeed;
  }

  /**
   * Set the value of randomSeed.
   *
   * @param newrandomSeed Value to assign to randomSeed.
   */
  public void setRandomSeed(int newrandomSeed) {

    m_randomSeed = newrandomSeed;
  }

  /**
   * Prints out the classifier.
   *
   * @return a description of the classifier as a string
   */
  public String toString() {

    StringBuffer text = new StringBuffer();
    int printed = 0;

    if ((m_classAttribute == null)) {
      return "SMO: No model built yet.";
    }
    try {
      text.append("SMO\n\n");
      for (int i = 0; i < m_classAttribute.numValues(); i++) {
        for (int j = i + 1; j < m_classAttribute.numValues(); j++) {
          text.append("Classifier for classes: " +
                      m_classAttribute.value(i) + ", " +
                      m_classAttribute.value(j) + "\n\n");
          text.append(m_classifiers[i][j]);
          if (m_fitLogisticModels) {
            text.append("\n\n" + m_classifiers[i][j].m_logistic);
          }
          text.append("\n\n");
        }
      }
    } catch (Exception e) {
      return "Can't print SMO classifier.";
    }

    return text.toString();
  }

  /**
   * Main method for testing this class.
   */
  public static void main(String[] argv) {

    Classifier scheme;

    try {
      scheme = new SMO();
      System.out.println(Evaluation.evaluateModel(scheme, argv));
    } catch (Exception e) {
      e.printStackTrace();
      System.err.println(e.getMessage());
    }
  }
}
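The file's own main method only shows command-line evaluation via Evaluation.evaluateModel. As a complement, the short sketch below shows how the option flags documented in the listing could be set programmatically through setOptions and read back with getOptions. It is an illustration only: the demo class name is made up, and it assumes this SMO class and the Weka core classes (weka.core.Utils, weka.core.SelectedTag, etc.) are compiled and on the classpath.

import java.util.Arrays;

public class SmoOptionsDemo {

  public static void main(String[] args) throws Exception {
    SMO smo = new SMO();

    // Same effect as the command line "-R -G 0.05 -C 2.0 -M":
    // RBF kernel with gamma 0.05, complexity constant C = 2.0,
    // and logistic models fitted to the SVM outputs.
    smo.setOptions(new String[] {"-R", "-G", "0.05", "-C", "2.0", "-M"});

    // getOptions() round-trips the current settings as a flag array
    // (padded with empty strings up to its fixed length of 21).
    System.out.println(Arrays.toString(smo.getOptions()));
  }
}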
