📄 activefeatureacquisitioncvresultproducer.java
字号:
m_CurrentSize = m_LowerSize;
}

// NOTE(review): the enclosing method begins before this chunk; the lines above
// are the tail of logic whose start is not visible here.
// maxSize should not exceed total number of instances in current fold
int maxSize = maxTrainSize();
int trainSize = fullTrain.numInstances();
if(maxSize > trainSize) maxSize = trainSize;
// Walk the learning curve: grow the training set to each requested size and
// evaluate once per size.
while (m_CurrentSize <= maxSize) {
  // Add in some fields to the key like run and fold number, dataset name
  Object [] seKey = m_SplitEvaluator.getKey();
  Object [] key = new Object [seKey.length + numExtraKeys];
  key[0] = Utils.backQuoteChars(m_Instances.relationName());
  key[1] = "" + run;
  key[2] = "" + (fold + 1);
  key[3] = "" + m_CurrentSize;
  if(m_IsFraction)
    // when plot points are fractions, the fraction itself is an extra key column
    key[4] = "" + m_PlotPoints[pointNum];
  System.arraycopy(seKey, 0, key, numExtraKeys, seKey.length);
  // Only compute a result if the listener does not already have one for this key
  if (m_ResultListener.isResultRequired(this, key)) {
    try {
      if(m_IsFraction)
        System.out.println("Run:" + run + " Fold:" + fold + " Size:" + m_CurrentSize + " Fraction:" + m_PlotPoints[pointNum]);
      else
        System.out.println("Run:" + run + " Fold:" + fold + " Size:" + m_CurrentSize);
      if(firstPoint){
        // the first training set is always randomly selected
        firstPoint = false;
        transferRandomInstances(local,global,localToGlobal,m_CurrentSize - prevSize);
      }else{
        // use current classifier to actively select
        // specified number of local instances;
        // acquire non-local features and add to the training set
        transferInstances(local,global,localToGlobal,m_CurrentSize - prevSize);
      }
      // create training set from the global (fully-featured) pool plus the
      // remaining local-only instances
      train.delete();
      addInstances(train,global);
      addInstances(train,local);
      System.out.println("curr size: "+m_CurrentSize+"\t global size: "+global.numInstances()+"\t local size: "+local.numInstances()+"\t train size: "+train.numInstances());
      Object [] seResults = m_SplitEvaluator.getResult(train, test);
      Object [] results = new Object [seResults.length + 1];
      results[0] = getTimestamp();
      System.arraycopy(seResults, 0, results, 1, seResults.length);
      if (m_debugOutput) {
        // Archive the evaluator's raw output under a name encoding run,
        // fold, size, dataset and evaluator configuration.
        String resultName = (""+run+"."+(fold+1)+"."+ m_CurrentSize + "." + Utils.backQuoteChars(runInstances.relationName()) +"." +m_SplitEvaluator.toString()).replace(' ','_');
        resultName = Utils.removeSubstring(resultName, "weka.classifiers.");
        resultName = Utils.removeSubstring(resultName, "weka.filters.");
        resultName = Utils.removeSubstring(resultName, "weka.attributeSelection.");
        m_ZipDest.zipit(m_SplitEvaluator.getRawResultOutput(), resultName);
      }
      m_ResultListener.acceptResult(this, key, results);
    } catch (Exception ex) {
      // Save the train and test datasets for debugging purposes?
      throw ex;
    }
  }
  prevSize = m_CurrentSize;
  // Advance to the next point on the curve: either the next explicit plot
  // point or a fixed step.
  if (m_PlotPoints != null) {
    pointNum ++;
    m_CurrentSize = plotPoint(pointNum);
  } else {
    m_CurrentSize += m_StepSize;
  }
}
}
}

/**
 * Use current classifier to actively select specified number of instances
 * to be transferred from the local to global pool.
 *
 * @param local instances with only local features
 * @param global instances with global features
 * @param localToGlobal map from local to corresponding global instances
 * @param num number of instances to pick from the pool
 * @throws Exception if the split evaluator does not wrap a classifier
 */
protected void transferInstances(Instances local, Instances global, HashMap localToGlobal, int num) throws Exception{
  Classifier classifier;
  try{
    classifier = ((ClassifierSplitEvaluator)m_SplitEvaluator).getClassifier();
  }catch (Exception ex){
    throw new Exception("Active feature acquisition is only implemented for evaluators of classifiers.");
  }
  if(classifier instanceof ActiveFeatureAcquirer){
    // get indices of examples picked by the classifier
    // NOTE(review): the loop below assumes selectInstancesForFeatures returns
    // exactly num indices -- TODO confirm against ActiveFeatureAcquirer impls
    int []indices = ((ActiveFeatureAcquirer)classifier).selectInstancesForFeatures(local,num);
    // sort indices in ascending order, but traverse through
    // the list in the reverse order - this ensures that examples
    // are deleted in an order that doesn't invalidate indices
    Arrays.sort(indices);
    for(int i=(num-1); i>=0; i--){
      global.add((Instance)localToGlobal.get(local.instance(indices[i])));
      local.delete(indices[i]);
    }
  }else{
    // classifier cannot rank instances: randomly pick examples from local pool
    transferRandomInstances(local,global,localToGlobal,num);
  }
}

/**
 * Randomly select the specified number of
instances * to be transfered from the local to global pool * * @param local instances with only local features * @param global instances with global features * @param localToGlobal map from local to corresponding global instances * @param num number of instances to pick from the pool */ protected void transferRandomInstances(Instances local, Instances global, HashMap localToGlobal, int num) throws Exception{ for(int i=0; i<num; i++){ global.add((Instance)localToGlobal.get(local.instance(0))); local.delete(0); } } /** * Replace non local features with missing values * @param local instances that will be reduced to local features * @param seed seed for random number generator */ protected void ablateFeatures(Instances local, int seed){ if(m_AblationLevel >= 0 && m_RandomAblation) ablateRandomFeatures(local, seed); else{ for(int i=0; i<local.numInstances(); i++){ Instance curr = local.instance(i); for(int j=m_NonLocalStartIndex; j<=m_NonLocalEndIndex; j++){ curr.setMissing(j); } assert !curr.classIsMissing(); } } } /** * Ablate specified fraction of randomly selected features. 
 * @param local instances that will be reduced to local features
 * @param seed seed for random number generator
 */
protected void ablateRandomFeatures(Instances local, int seed){
  System.out.println("Ablating features randomly");
  // produce a list of random indices of features to be ablated
  // NOTE(review): assumes the class attribute is the last one (index numAtts),
  // since only indices 0..numAtts-1 are ever candidates -- confirm
  int numAtts = m_Instances.numAttributes()-1;//exclude the class attribute
  int numToSelect = (int) (m_AblationLevel * numAtts);
  // guarantee at least one feature is ablated (the original comment said
  // "at least one feature should be available", but numToSelect counts
  // features to ABLATE, not to keep)
  if(numToSelect<1) numToSelect=1;
  // Partial Fisher-Yates shuffle: after the loop, the last numToSelect
  // entries of indices[] hold the randomly chosen attribute indices.
  int []indices = new int [numAtts];
  for(int j=0; j<numAtts; j++) indices[j]=j;
  Random random = new Random(seed);
  for(int i=0; i<numToSelect; i++){
    int selected = random.nextInt(numAtts-i);
    // swap with index from the end
    int tmp = indices[selected];
    indices[selected] = indices[numAtts - i - 1];
    indices[numAtts - i - 1] = tmp;
  }
  // debug output: dump the permuted index array
  for(int i=0;i<numAtts;i++) System.out.print(indices[i]+" ");
  System.out.println();
  // perform ablation: blank the selected attributes (tail of indices[])
  // in every instance
  for(int i=0; i<local.numInstances(); i++){
    Instance curr = local.instance(i);
    for(int j=(numAtts-1); j>(numAtts - 1 - numToSelect); j--){
      curr.setMissing(indices[j]);
    }
    assert !curr.classIsMissing();
  }
}

/**
 * Add new instances to the given set of instances.
 *
 * @param data given instances (modified in place)
 * @param newData set of instances to add to given instances
 */
protected void addInstances(Instances data, Instances newData){
  for(int i=0; i<newData.numInstances(); i++)
    data.add(newData.instance(i));
}

/**
 * Determines if the points specified are fractions of the total number of
 * examples; updates m_IsFraction from the first plot point and returns it.
 */
protected boolean setIsFraction(){
  if (m_PlotPoints != null){
    if(!isInteger(m_PlotPoints[0]))//if the first point is not an integer
      m_IsFraction = true;
    else
      m_IsFraction = false;
  }
  // with no plot points configured, the previously stored flag is returned
  return m_IsFraction;
}

/** Return the number of training examples for the ith point on the
 * curve for plotPoints as specified.
*/ protected int plotPoint(int i) { // If i beyond number of given plot points return a value greater than maximum training size if (i >= m_PlotPoints.length) return maxTrainSize() + 1; double point = m_PlotPoints[i]; // If plot point is an integer (other than a non-initial 1) // treat it as a specific number of examples if (isInteger(point) && !(Utils.eq(point, 1.0) && i!=0)) return (int)point; else // Otherwise, treat it as a percentage of the full set return (int)Math.round(point * maxTrainSize()); } /** Return true if the given double represents an integer value */ protected static boolean isInteger(double val) { return Utils.eq(Math.floor(val), Math.ceil(val)); } /** * Gets the names of each of the columns produced for a single run. * This method should really be static. * * @return an array containing the name of each column */ public String [] getKeyNames() { String [] keyNames = m_SplitEvaluator.getKeyNames(); // Add in the names of our extra key fields int numExtraKeys; if(m_IsFraction) numExtraKeys = 5; else numExtraKeys = 4; String [] newKeyNames = new String [keyNames.length + numExtraKeys]; newKeyNames[0] = DATASET_FIELD_NAME; newKeyNames[1] = RUN_FIELD_NAME; newKeyNames[2] = FOLD_FIELD_NAME; newKeyNames[3] = STEP_FIELD_NAME; if(m_IsFraction) newKeyNames[4] = FRACTION_FIELD_NAME; System.arraycopy(keyNames, 0, newKeyNames, numExtraKeys, keyNames.length); return newKeyNames; } /** * Gets the data types of each of the columns produced for a single run. * This method should really be static. * * @return an array containing objects of the type of each column. The * objects should be Strings, or Doubles. 
*/ public Object [] getKeyTypes() { Object [] keyTypes = m_SplitEvaluator.getKeyTypes(); int numExtraKeys; if(m_IsFraction) numExtraKeys = 5; else numExtraKeys = 4; // Add in the types of our extra fields Object [] newKeyTypes = new String [keyTypes.length + numExtraKeys]; newKeyTypes[0] = new String(); newKeyTypes[1] = new String(); newKeyTypes[2] = new String(); newKeyTypes[3] = new String(); if(m_IsFraction) newKeyTypes[4] = new String(); System.arraycopy(keyTypes, 0, newKeyTypes, numExtraKeys, keyTypes.length); return newKeyTypes; } /** * Gets the names of each of the columns produced for a single run. * This method should really be static. * * @return an array containing the name of each column */ public String [] getResultNames() { String [] resultNames = m_SplitEvaluator.getResultNames(); // Add in the names of our extra Result fields String [] newResultNames = new String [resultNames.length + 1]; newResultNames[0] = TIMESTAMP_FIELD_NAME; System.arraycopy(resultNames, 0, newResultNames, 1, resultNames.length); return newResultNames; } /** * Gets the data types of each of the columns produced for a single run. * This method should really be static. * * @return an array containing objects of the type of each column. The * objects should be Strings, or Doubles. */ public Object [] getResultTypes() { Object [] resultTypes = m_SplitEvaluator.getResultTypes(); // Add in the types of our extra Result fields Object [] newResultTypes = new Object [resultTypes.length + 1]; newResultTypes[0] = new Double(0); System.arraycopy(resultTypes, 0, newResultTypes, 1, resultTypes.length); return newResultTypes; } /** * Gets a description of the internal settings of the result * producer, sufficient for distinguishing a ResultProducer * instance from another with different settings (ignoring * those settings set through this interface). For example, * a cross-validation ResultProducer may have a setting for the * number of folds. 
For a given state, the results produced should * be compatible. Typically if a ResultProducer is an OptionHandler, * this string will represent the command line arguments required * to set the ResultProducer to that state. * * @return the description of the ResultProducer state, or null * if no state is defined */ public String getCompatibilityState() { String result = "-X " + m_NumFolds + " -S " + getStepSize() + " -L " + getLowerSize() + " -U " + getUpperSize() + " "; if (m_SplitEvaluator == null) { result += "<null SplitEvaluator>"; } else { result += "-W " + m_SplitEvaluator.getClass().getName(); } return result + " --"; } /** * Returns the tip text for this property * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String outputFileTipText() { return "Set the destination for saving raw output. If the rawOutput " +"option is selected, then output from the splitEvaluator for " +"individual folds is saved. If the destination is a directory, " +"then each output is saved to an individual gzip file; if the " +"destination is a file, then each output is saved as an entry " +"in a zip file."; } /** * Get the value of OutputFile. * * @return Value of OutputFile. */ public File getOutputFile() { return m_OutputFile; } /** * Set the value of OutputFile. * * @param newOutputFile Value to assign to OutputFile. */ public void setOutputFile(File newOutputFile) { m_OutputFile = newOutputFile; } /** * Returns the tip text for this property * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String numFoldsTipText() { return "Number of folds to use in cross validation."; } /** * Get the value of NumFolds. * * @return Value of NumFolds. */ public int getNumFolds() { return m_NumFolds; } /** * Set the value of NumFolds. * * @param newNumFolds Value to assign to NumFolds. 
 */
public void setNumFolds(int newNumFolds) {
  m_NumFolds = newNumFolds;
}

/**
 * Returns the tip text for this property.
 *
 * @return tip text for this property suitable for
 * displaying in the explorer/experimenter gui
 */
public String lowerSizeTipText() {
  return "Set the minimum number of instances in a training set. Setting zero "
    + "here will actually use <stepSize> number of instances at the first "
    + "step (since performance at zero instances is predictable)";
}

/**
 * Get the value of LowerSize.
 *
 * @return Value of LowerSize.
 */
public int getLowerSize() {
  return m_LowerSize;
}

/**
 * Set the value of LowerSize.
 *
 * @param newLowerSize Value to assign to
 * LowerSize.
 */
// NOTE(review): this method's body continues beyond the visible chunk
public void setLowerSize(int newLowerSize) {
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -