⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 crf2.java

📁 常用机器学习算法,java编写源代码,内含常用分类算法,包括说明文档
💻 JAVA
📖 第 1 页 / 共 5 页
字号:
					String[] destinationNames = new String[labels.length];
					for (int l = 0; l < labels.length; l++)
						destinationNames[l] = labels[j]+','+labels[k]+','+labels[l];
					addState (labels[i]+','+labels[j]+','+labels[k], 0.0, 0.0,
										destinationNames, labels);
				}
			}
		}
	}
	// NOTE(review): the loop tail above belongs to a method whose declaration
	// precedes this excerpt (page break); it is reproduced unchanged.

	/**
	 * Adds a single state named {@code name} whose outgoing transitions all
	 * loop back to itself — one self-transition per label in the output
	 * alphabet, labeled with that label.
	 */
	public void addSelfTransitioningStateForAllLabels (String name)
	{
		String[] labels = new String[outputAlphabet.size()];
		String[] destinationNames  = new String[outputAlphabet.size()];
		// This is assuming that the entries in the outputAlphabet are Strings!
		for (int i = 0; i < outputAlphabet.size(); i++) {
			logger.info("CRF: outputAlphabet.lookup class = "+
									outputAlphabet.lookupObject(i).getClass().getName());
			labels[i] = (String) outputAlphabet.lookupObject(i);
			destinationNames[i] = name;  // every transition returns to this state
		}
		addState (name, 0.0, 0.0, destinationNames, labels);
	}

	/**
	 * Replaces the weight vector stored at {@code weightsIndex} and marks the
	 * cached cost and gradient stale.
	 *
	 * @throws IllegalArgumentException if {@code weightsIndex} is out of bounds
	 */
	public void setWeights (int weightsIndex, SparseVector transitionWeights)
	{
		cachedCostStale = cachedGradientStale = true;
		if (weightsIndex >= weights.length || weightsIndex < 0)
			throw new IllegalArgumentException ("weightsIndex "+weightsIndex+" is out of bounds");
		weights[weightsIndex] = transitionWeights;
	}

	/** Name-based convenience overload; resolves {@code weightName} via
			{@link #getWeightsIndex}, which may create a new slot for a new name. */
	public void setWeights (String weightName, SparseVector transitionWeights)
	{
		setWeights (getWeightsIndex (weightName), transitionWeights);
	}

	/** Returns the name of the weight vector at {@code weightIndex}. */
	public String getWeightsName (int weightIndex)
	{
		return (String) weightAlphabet.lookupObject (weightIndex);
	}

	/** Returns the weight vector registered under {@code weightName}. */
	public SparseVector getWeights (String weightName)
	{
		return weights[getWeightsIndex (weightName)];
	}

	/** Returns the weight vector at {@code weightIndex}. */
	public SparseVector getWeights (int weightIndex)
	{
		return weights[weightIndex];
	}

	/**
	 * Rebuilds each sparse weight vector so it has a location for exactly the
	 * features that are (a) already present in the current weights, or
	 * (b) touched while running forward-backward over {@code trainingData}
	 * (both along the label-consistent paths and along the model's own paths).
	 * New locations start at zero; existing values are carried over.
	 */
	public void setWeightsDimensionAsIn (InstanceList trainingData)
	{
		// The cost doesn't actually change, because the "new" parameters will have zero value
		// but the gradient changes because the parameters now have different layout.
		cachedCostStale = cachedGradientStale = true;
		setTrainable (false);
		weightsPresent = new BitSet[weights.length];
		for (int i = 0; i < weights.length; i++)
			weightsPresent[i] = new BitSet();
		gatheringWeightsPresent = true;
		// Put in the weights that are already there
		for (int i = 0; i < weights.length; i++)
			for (int j = weights[i].numLocations()-1; j >= 0; j--)
				weightsPresent[i].set (weights[i].indexAtLocation(j));
		// Put in the weights in the training set
		for (int i = 0; i < trainingData.size(); i++) {
			Instance instance = trainingData.getInstance(i);
			FeatureVectorSequence input = (FeatureVectorSequence) instance.getData();
			FeatureSequence output = (FeatureSequence) instance.getTarget();
			// Do it for the paths consistent with the labels...
			gatheringConstraints = true;
			forwardBackward (input, output, true);
			gatheringConstraints = false;
			if (true)	// perhaps only do this once some training is done
				// ...and for the paths selected by the current model (so we will get some negative weights)
				forwardBackward (input, null, true);
		}
		gatheringWeightsPresent = false;
		SparseVector[] newWeights = new SparseVector[weights.length];
		for (int i = 0; i < weights.length; i++) {
			int numLocations = weightsPresent[i].cardinality ();
			logger.info("CRF weights["+weightAlphabet.lookupObject(i)+"] num features = "+numLocations);
			int[] indices = new int[numLocations];
			// Walk the set bits of weightsPresent[i] in ascending order.
			for (int j = 0; j < numLocations; j++) {
				indices[j] = weightsPresent[i].nextSetBit (j == 0 ? 0 : indices[j-1]+1);
				//System.out.println ("CRF2 has index "+indices[j]);
			}
			// Zero-valued vector over exactly these indices.
			// NOTE(review): constructor flag semantics (copy/sorted) assumed from
			// project SparseVector API — confirm against its declaration.
			newWeights[i] = new SparseVector (indices, new double[numLocations],
																				numLocations, numLocations, false, false, false);
			newWeights[i].plusEqualsSparse (weights[i]);
			weights[i] = null;  // drop the old vector eagerly to free memory
		}
		weights = newWeights;
	}

	/** Increase the size of the weights[] parameters to match (a new, larger)
			input Alphabet size */
	// No longer needed
	/*
	public void growWeightsDimensionToInputAlphabet ()
	{
		int vs = inputAlphabet.size();
		if (vs == this.defaultFeatureIndex)
			// Doesn't need to grow
			return;
		assert (vs > this.defaultFeatureIndex);
		setTrainable (false);
		for (int i = 0; i < weights.length; i++) {
			DenseVector newWeights = new DenseVector (vs+1);
			newWeights.arrayCopyFrom (0, weights[i]);
			newWeights.setValue (vs, weights[i].value (defaultFeatureIndex));
			newWeights.setValue (defaultFeatureIndex, 0);
			weights[i] = newWeights;
		}
		this.defaultFeatureIndex = vs;
		cachedCostStale = true;
		cachedGradientStale = true;
	}
	*/

	// Create a new weight Vector if weightName is new.
public int getWeightsIndex (String weightName)
	{
		// Resolves weightName to an index, growing the weights/defaultWeights
		// arrays by one slot when the name is new.  Any growth invalidates the
		// training accumulators, hence setTrainable(false).
		int wi = weightAlphabet.lookupIndex (weightName);
		if (wi == -1)
			// lookupIndex returns -1 when the alphabet is frozen and the name is unknown
			throw new IllegalArgumentException ("Alphabet frozen, and no weight with name "+ weightName);
		if (weights == null) {
			// First weight ever registered
			assert (wi == 0);
			weights = new SparseVector[1];
			defaultWeights = new double[1];
			// Use initial capacity of 8
			weights[0] = new SparseVector ();
			defaultWeights[0] = 0;
			setTrainable (false);
		} else if (wi == weights.length) {
			// New name: grow both parallel arrays by one, copying the old entries
			SparseVector[] newWeights = new SparseVector[weights.length+1];
			double[] newDefaultWeights = new double[weights.length+1];
			for (int i = 0; i < weights.length; i++) {
				newWeights[i] = weights[i];
				newDefaultWeights[i] = defaultWeights[i];
			}
			newWeights[wi] = new SparseVector ();
			newDefaultWeights[wi] = 0;
			weights = newWeights;
			defaultWeights = newDefaultWeights;
			setTrainable (false);
		}
		return wi;
	}

	/** Number of states in this transducer. */
	public int numStates () { return states.size(); }

	/** Returns the state at {@code index}. */
	public Transducer.State getState (int index) {
		return (Transducer.State) states.get(index); }

	/** Iterator over the states marked as initial. */
	public Iterator initialStateIterator () {
		return initialStates.iterator (); }

	/** Whether the training accumulators are currently allocated. */
	public boolean isTrainable () { return trainable; }

	/**
	 * Allocates ({@code f == true}) or releases ({@code f == false}) the
	 * constraint/expectation accumulators used during training, and propagates
	 * the flag to every state.  No-op if the flag is unchanged.
	 */
	public void setTrainable (boolean f)
	{
		if (f != trainable) {
			if (f) {
				constraints = new SparseVector[weights.length];
				expectations = new SparseVector[weights.length];
				defaultConstraints = new double[weights.length];
				defaultExpectations = new double[weights.length];
				for (int i = 0; i < weights.length; i++) {
					// Same sparsity pattern as the weights, all values zeroed
					constraints[i] = (SparseVector) weights[i].cloneMatrixZeroed ();
					expectations[i] = (SparseVector) weights[i].cloneMatrixZeroed ();
				}
			} else {
				constraints = expectations = null;
				defaultConstraints = defaultExpectations = null;
			}
			for (int i = 0; i < numStates(); i++)
				((State)getState(i)).setTrainable(f);
			trainable = f;
		}
	}

	/**
	 * Sets one parameter of the transition from {@code sourceStateIndex} to
	 * {@code destStateIndex}.  A negative {@code featureIndex} addresses the
	 * transition's default (bias) weight; otherwise the weight of that feature.
	 *
	 * @throws IllegalArgumentException if no such transition exists
	 */
	public void setParameter (int sourceStateIndex, int destStateIndex, int featureIndex, double value)
	{
		cachedCostStale = cachedGradientStale = true;
		State source = (State)getState(sourceStateIndex);
		State dest = (State) getState(destStateIndex);
		// Linear scan for the row whose destination matches dest
		int rowIndex;
		for (rowIndex = 0; rowIndex < source.destinationNames.length; rowIndex++)
			if (source.destinationNames[rowIndex].equals (dest.name))
				break;
		if (rowIndex == source.destinationNames.length)
			// NOTE(review): "transtition" typo in the message text (runtime string; left unchanged here)
			throw new IllegalArgumentException ("No transtition from state "+sourceStateIndex+" to state "+destStateIndex+".");
		int weightsIndex = source.weightsIndices[rowIndex];
		if (featureIndex < 0)
			defaultWeights[weightsIndex] = value;
		else {
			weights[weightsIndex].setValue (featureIndex, value);
		}
	}

	/**
	 * Returns one parameter of the transition from {@code sourceStateIndex} to
	 * {@code destStateIndex}; a negative {@code featureIndex} addresses the
	 * default (bias) weight.
	 * NOTE(review): the {@code value} parameter is never read — it appears to
	 * exist only to mirror {@link #setParameter}'s signature.
	 *
	 * @throws IllegalArgumentException if no such transition exists
	 */
	public double getParameter (int sourceStateIndex, int destStateIndex, int featureIndex, double value)
	{
		State source = (State)getState(sourceStateIndex);
		State dest = (State) getState(destStateIndex);
		int rowIndex;
		for (rowIndex = 0; rowIndex < source.destinationNames.length; rowIndex++)
			if (source.destinationNames[rowIndex].equals (dest.name))
				break;
		if (rowIndex == source.destinationNames.length)
			// NOTE(review): same "transtition" typo as in setParameter (runtime string; left unchanged)
			throw new IllegalArgumentException ("No transtition from state "+sourceStateIndex+" to state "+destStateIndex+".");
		int weightsIndex = source.weightsIndices[rowIndex];
		if (featureIndex < 0)
			return defaultWeights[weightsIndex];
		else
			return weights[weightsIndex].value (featureIndex);
	}

	/** Unsupported: CRFs do not use the generic reset protocol. */
	public void reset ()
	{
		throw new UnsupportedOperationException ("Not used in CRFs");
	}

	/** Unsupported placeholder for the generic estimation protocol. */
	public void estimate ()
	{
		if (!trainable)
			throw new IllegalStateException ("This transducer not currently trainable.");
		// xxx Put stuff in here.
		throw new UnsupportedOperationException ("Not yet implemented.  Never?");
	}

	// yyy
	/**
	 * Dumps a human-readable description of every state — initial/final costs,
	 * outgoing transitions, and each transition's nonzero feature weights
	 * (ranked) plus its default weight — to standard output.
	 */
	public void print ()
	{
		StringBuffer sb = new StringBuffer();
		for (int i = 0; i < numStates(); i++) {
			State s = (State) getState (i);
			sb.append (s.name);	sb.append (" ("); sb.append (s.destinations.length); sb.append (" outgoing transitions)\n");
			sb.append ("  "); sb.append ("initialCost = "); sb.append (s.initialCost); sb.append ('\n');
			sb.append ("  "); sb.append ("finalCost = "); sb.append (s.finalCost); sb.append ('\n');
			for (int j = 0; j < s.destinations.length; j++) {
				sb.append (" -> ");	sb.append (s.destinations[j].name); sb.append ('\n');
				SparseVector transitionWeights = weights[s.weightsIndices[j]];
				// Rank the transition's features by weight for readable output
				RankedFeatureVector rfv = new RankedFeatureVector (inputAlphabet, transitionWeights);
				sb.append ("  ");
				sb.append (s.name); sb.append (" -> "); sb.append (s.destinations[j].name); sb.append (": ");
				sb.append ("<DEFAULT_FEATURE> = "); sb.append (defaultWeights[s.weightsIndices[j]]); sb.append('\n');
				for (int k = 0; k < rfv.singleSize(); k++) {
					double v = rfv.getValueAtRank(k);
					int index = rfv.getIndexAtRank(k);
					Object feature = inputAlphabet.lookupObject (index);
					if (v != 0) {  // skip zero weights
						sb.append ("  ");
						sb.append (s.name); sb.append (" -> "); sb.append (s.destinations[j].name); sb.append (": ");
						sb.append (feature); sb.append (" = "); sb.append (v); sb.append ('\n');
					}
				}
			}
		}
		System.out.println (sb.toString());
	}

	// Java question:
	// If I make a non-static inner class CRF.Trainer,
	// can that class by subclassed in another .java file,
	// and can that subclass still have access to all the CRF's
	// instance variables?
public boolean train (InstanceList ilist)
	{
		// Convenience overload: no validation or testing sets.
		return train (ilist, (InstanceList)null, (InstanceList)null);
	}

	public boolean train (InstanceList ilist, InstanceList validation, InstanceList testing)
	{
		// Convenience overload: no evaluator.
		return train (ilist, validation, testing, (TransducerEvaluator)null);
	}

	public boolean train (InstanceList ilist, InstanceList validation, InstanceList testing,
										 TransducerEvaluator eval)
	{
		// 9999 acts as "effectively unlimited" iterations.
		return train (ilist, validation, testing, eval, 9999);
	}

	/**
	 * Trains this CRF on {@code ilist} by L-BFGS minimization, for at most
	 * {@code numIterations} iterations or until convergence.  After each
	 * iteration the optional {@code eval} may stop training early.  Returns
	 * whether the minimizer reported convergence.
	 */
	public boolean train (InstanceList ilist, InstanceList validation, InstanceList testing,
												TransducerEvaluator eval, int numIterations)
	{
		if (numIterations <= 0)
			return false;
		assert (ilist.size() > 0);
		// Lay out the weight vectors to match the features in the training data
		setWeightsDimensionAsIn (ilist);
		MinimizableCRF mc = new MinimizableCRF (ilist, this);
		//Minimizer.ByGradient minimizer = new ConjugateGradient (0.001);
		Minimizer.ByGradient minimizer = new LimitedMemoryBFGS();
		int i;
		boolean continueTraining = true;
		boolean converged = false;
		logger.info("CRF about to train with "+numIterations+" iterations");
		for (i = 0; i < numIterations; i++) {
			try {
				// One minimizer step per loop iteration
				converged = minimizer.minimize (mc, 1);
				logger.info("CRF finished one iteration of minimizer, i="+i);
			} catch (IllegalArgumentException e) {
				// NOTE(review): treats a minimizer failure as convergence rather
				// than propagating it; printStackTrace used instead of the logger.
				e.printStackTrace();
				logger.info("Catching exception; saying converged.");
				converged = true;
			}
			if (eval != null) {
				continueTraining = eval.evaluate (this, (converged || i == numIterations-1), i,
																					converged, mc.getCost(), ilist, validation, testing);
				if (!continueTraining)
					break;
			}
			if (converged) {
				logger.info("CRF training has converged, i="+i);
				break;
			}
		}
		logger.info("About to setTrainable(false)");
		// Free the memory of the expectations and constraints
		setTrainable (false);
		logger.info("Done setTrainable(false)");
		return converged;
	}

	/**
	 * Trains in stages on growing proportions of the training data.
	 * NOTE(review): this method is cut off at the end of this excerpt
	 * (page break) — the body continues on the next page.
	 */
	public boolean train (InstanceList training, InstanceList validation, InstanceList testing,
												TransducerEvaluator eval, int numIterations,
												int numIterationsPerProportion,
												double[] trainingProportions)
	{
		int trainingIteration = 0;
		// NOTE(review): trainingProportions is dereferenced in the loop condition
		// before the null check below, so the guard inside the loop is
		// ineffective — a null argument would NPE here first.
		for (int i = 0; i < trainingProportions.length; i++) {
			// Train the CRF
			InstanceList theTrainingData = training;
			if (trainingProportions != null && i < trainingProportions.length) {
				logger.info("Training on "+trainingProportions[i]+"% of the data this round.");
				InstanceList[] sampledTrainingData = training.split (new Random(1),

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -