
📄 lwr.java

📁 wekaUT is a weka-based semi-supervised learning classifier package developed at the University of Texas at Austin
💻 JAVA
📖 Page 1 of 2
      return;
    }
    m_WeightKernel = kernel;
  }

  /**
   * Gets the kernel weighting method to use.
   *
   * @return the new kernel method to use. Will be one of LINEAR,
   * INVERSE, or GAUSS
   */
  public int getWeightingKernel() {
    return m_WeightKernel;
  }

  /**
   * Gets an attribute's minimum observed value
   *
   * @param index the index of the attribute
   * @return the minimum observed value
   */
  protected double getAttributeMin(int index) {
    return m_Min[index];
  }

  /**
   * Gets an attribute's maximum observed value
   *
   * @param index the index of the attribute
   * @return the maximum observed value
   */
  protected double getAttributeMax(int index) {
    return m_Max[index];
  }

  /**
   * Generates the classifier.
   *
   * @param instances set of instances serving as training data
   * @exception Exception if the classifier has not been generated successfully
   */
  public void buildClassifier(Instances instances) throws Exception {

    if (instances.classIndex() < 0) {
      throw new Exception("No class attribute assigned to instances");
    }
    if (instances.classAttribute().type() != Attribute.NUMERIC) {
      throw new UnsupportedClassTypeException("Class attribute must be numeric");
    }
    if (instances.checkForStringAttributes()) {
      throw new UnsupportedAttributeTypeException("Cannot handle string attributes!");
    }

    // Throw away training instances with missing class
    m_Train = new Instances(instances, 0, instances.numInstances());
    m_Train.deleteWithMissingClass();

    // Calculate the minimum and maximum values
    m_Min = new double [m_Train.numAttributes()];
    m_Max = new double [m_Train.numAttributes()];
    for (int i = 0; i < m_Train.numAttributes(); i++) {
      m_Min[i] = m_Max[i] = Double.NaN;
    }
    for (int i = 0; i < m_Train.numInstances(); i++) {
      updateMinMax(m_Train.instance(i));
    }
  }

  /**
   * Adds the supplied instance to the training set
   *
   * @param instance the instance to add
   * @exception Exception if instance could not be incorporated
   * successfully
   */
  public void updateClassifier(Instance instance) throws Exception {

    if (m_Train.equalHeaders(instance.dataset()) == false) {
      throw new Exception("Incompatible instance types");
    }
    if (!instance.classIsMissing()) {
      updateMinMax(instance);
      m_Train.add(instance);
    }
  }

  /**
   * Predicts the class value for the given test instance.
   *
   * @param instance the instance to be classified
   * @return the predicted class value
   * @exception Exception if an error occurred during the prediction
   */
  public double classifyInstance(Instance instance) throws Exception {

    if (m_Train.numInstances() == 0) {
      throw new Exception("No training instances!");
    }

    updateMinMax(instance);

    // Get the distances to each training instance
    double [] distance = new double [m_Train.numInstances()];
    for (int i = 0; i < m_Train.numInstances(); i++) {
      distance[i] = distance(instance, m_Train.instance(i));
    }
    int [] sortKey = Utils.sort(distance);
    if (m_Debug) {
      System.out.println("Instance Distances");
      for (int i = 0; i < distance.length; i++) {
        System.out.println("" + distance[sortKey[i]]);
      }
    }

    // Determine the bandwidth
    int k = sortKey.length - 1;
    if (!m_UseAllK && (m_kNN < k)) {
      k = m_kNN;
    }
    double bandwidth = distance[sortKey[k]];
    if (bandwidth == distance[sortKey[0]]) {
      for (int i = k; i < sortKey.length; i++) {
        if (distance[sortKey[i]] > bandwidth) {
          bandwidth = distance[sortKey[i]];
          break;
        }
      }
      if (bandwidth == distance[sortKey[0]]) {
        bandwidth *= 10;  // Include them all
      }
    }

    // Rescale the distances by the bandwidth
    for (int i = 0; i < distance.length; i++) {
      distance[i] = distance[i] / bandwidth;
    }

    // Pass the distances through a weighting kernel
    for (int i = 0; i < distance.length; i++) {
      switch (m_WeightKernel) {
      case LINEAR:
        distance[i] = Math.max(1.0001 - distance[i], 0);
        break;
      case INVERSE:
        distance[i] = 1.0 / (1.0 + distance[i]);
        break;
      case GAUSS:
        distance[i] = Math.exp(-distance[i] * distance[i]);
        break;
      }
    }
    if (m_Debug) {
      System.out.println("Instance Weights");
      for (int i = 0; i < distance.length; i++) {
        System.out.println("" + distance[i]);
      }
    }

    // Set the weights on a copy of the training data
    Instances weightedTrain = new Instances(m_Train, 0);
    for (int i = 0; i < distance.length; i++) {
      double weight = distance[sortKey[i]];
      if (weight < 1e-20) {
        break;
      }
      Instance newInst = (Instance) m_Train.instance(sortKey[i]).copy();
      newInst.setWeight(newInst.weight() * weight);
      weightedTrain.add(newInst);
    }
    if (m_Debug) {
      System.out.println("Kept " + weightedTrain.numInstances() + " out of "
                         + m_Train.numInstances() + " instances");
    }

    // Create a weighted linear regression
    lr.buildClassifier(weightedTrain);
    if (m_Debug) {
      System.out.println("Classifying test instance: " + instance);
      System.out.println("Built regression model:\n"
                         + lr.toString());
    }

    // Return the linear regression's prediction
    return lr.classifyInstance(instance);
  }

  /**
   * Returns a description of this classifier.
   *
   * @return a description of this classifier as a string.
   */
  public String toString() {

    if (m_Train == null) {
      return "Locally weighted regression: No model built yet.";
    }
    String result = "Locally weighted regression\n"
      + "===========================\n";
    switch (m_WeightKernel) {
    case LINEAR:
      result += "Using linear weighting kernels\n";
      break;
    case INVERSE:
      result += "Using inverse-distance weighting kernels\n";
      break;
    case GAUSS:
      result += "Using gaussian weighting kernels\n";
      break;
    }
    result += "Using " + (m_UseAllK ? "all" : "" + m_kNN) + " neighbours";
    return result;
  }

  /**
   * Calculates the distance between two instances
   *
   * @param first the first instance
   * @param second the second instance
   * @return the distance between the two given instances, between 0 and 1
   */
  private double distance(Instance first, Instance second) {

    double diff, distance = 0;
    int numAttribsUsed = 0;
    for (int i = 0; i < m_Train.numAttributes(); i++) {
      if (i == m_Train.classIndex()) {
        continue;
      }
      switch (m_Train.attribute(i).type()) {
      case Attribute.NOMINAL:
        // If attribute is nominal
        numAttribsUsed++;
        if (first.isMissing(i) || second.isMissing(i) ||
            ((int) first.value(i) != (int) second.value(i))) {
          diff = 1;
        } else {
          diff = 0;
        }
        break;
      case Attribute.NUMERIC:
        // If attribute is numeric
        numAttribsUsed++;
        if (first.isMissing(i) || second.isMissing(i)) {
          if (first.isMissing(i) && second.isMissing(i)) {
            diff = 1;
          } else {
            if (second.isMissing(i)) {
              diff = norm(first.value(i), i);
            } else {
              diff = norm(second.value(i), i);
            }
            if (diff < 0.5) {
              diff = 1.0 - diff;
            }
          }
        } else {
          diff = norm(first.value(i), i) - norm(second.value(i), i);
        }
        break;
      default:
        diff = 0;
        break;
      }
      distance += diff * diff;
    }
    return Math.sqrt(distance / numAttribsUsed);
  }

  /**
   * Normalizes a given value of a numeric attribute.
   *
   * @param x the value to be normalized
   * @param i the attribute's index
   */
  private double norm(double x, int i) {

    if (Double.isNaN(m_Min[i]) || Utils.eq(m_Max[i], m_Min[i])) {
      return 0;
    } else {
      return (x - m_Min[i]) / (m_Max[i] - m_Min[i]);
    }
  }

  /**
   * Updates the minimum and maximum values for all the attributes
   * based on a new instance.
   *
   * @param instance the new instance
   */
  private void updateMinMax(Instance instance) {

    for (int j = 0; j < m_Train.numAttributes(); j++) {
      if (!instance.isMissing(j)) {
        if (Double.isNaN(m_Min[j])) {
          m_Min[j] = instance.value(j);
          m_Max[j] = instance.value(j);
        } else if (instance.value(j) < m_Min[j]) {
          m_Min[j] = instance.value(j);
        } else if (instance.value(j) > m_Max[j]) {
          m_Max[j] = instance.value(j);
        }
      }
    }
  }

  /**
   * Main method for testing this class.
   *
   * @param argv the options
   */
  public static void main(String [] argv) {

    try {
      System.out.println(Evaluation.evaluateModel(new LWR(), argv));
    } catch (Exception e) {
      System.err.println(e.getMessage());
    }
  }
}
