⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 crf4.java

📁 常用机器学习算法,java编写源代码,内含常用分类算法,包括说明文档
💻 JAVA
📖 第 1 页 / 共 5 页
字号:
	// (Closing of an accessor whose signature is on the previous page of this
	// listing; it returns the per-weight-group default weights array.)
	{
		return defaultWeights;
	}

	// Methods added by Ryan McDonald
	// Purpose is for AGIS-Limited Memory Experiments
	// Allows one to train on AGIS for N iterations, and then
	// copy weights to begin training on Limited-Memory for the
	// rest.

	/** Returns the sparse weight vectors, one per weight group. */
	public SparseVector[] getWeights ()
	{
		return weights;
	}

	/** Replaces the weight vectors wholesale (used when transferring weights
	 *  between training procedures; see note above). */
	public void setWeights (SparseVector[] m) {
		weights = m;
	}

	/**
	 * Re-dimensions the sparse weight vectors so that each one holds exactly
	 * the feature indices already present plus those encountered while running
	 * forward-backward over {@code trainingData}.  Existing weight values are
	 * preserved (copied into the new layout via {@code plusEqualsSparse}).
	 */
	public void setWeightsDimensionAsIn (InstanceList trainingData)
	{
		// The value doesn't actually change, because the "new" parameters will have zero value
		// but the gradient changes because the parameters now have different layout.
		cachedValueStale = cachedGradientStale = true;
		setTrainable (false);
		// One BitSet of "present" feature indices per weight group.
		weightsPresent = new BitSet[weights.length];
		for (int i = 0; i < weights.length; i++)
			weightsPresent[i] = new BitSet();
		gatheringWeightsPresent = true;
		// Put in the weights that are already there
		for (int i = 0; i < weights.length; i++)
			for (int j = weights[i].numLocations()-1; j >= 0; j--)
				weightsPresent[i].set (weights[i].indexAtLocation(j));
		// Put in the weights in the training set
		if (this.someTrainingDone) System.err.println("Some training done previously");
		for (int i = 0; i < trainingData.size(); i++) {
			Instance instance = trainingData.getInstance(i);
			FeatureVectorSequence input = (FeatureVectorSequence) instance.getData();
			FeatureSequence output = (FeatureSequence) instance.getTarget();
			// Do it for the paths consistent with the labels...
			gatheringConstraints = true;
			forwardBackward (input, output, true);
			// ...and also do it for the paths selected by the current model (so we will get some negative weights)
			gatheringConstraints = false;
			if (this.someTrainingDone)
				// (do this once some training is done)
				forwardBackward (input, null, true);
		}
		gatheringWeightsPresent = false;
		// Rebuild each weight vector with exactly the gathered indices,
		// copying over any existing values.
		SparseVector[] newWeights = new SparseVector[weights.length];
		for (int i = 0; i < weights.length; i++) {
			int numLocations = weightsPresent[i].cardinality ();
			logger.info ("CRF weights["+weightAlphabet.lookupObject(i)+"] num features = "+numLocations);
			int[] indices = new int[numLocations];
			for (int j = 0; j < numLocations; j++) {
				// Walk the BitSet in ascending order: each index is the next set
				// bit after the previously collected one.
				indices[j] = weightsPresent[i].nextSetBit (j == 0 ? 0 : indices[j-1]+1);
				//System.out.println ("CRF4 has index "+indices[j]);
			}
			newWeights[i] = new IndexedSparseVector (indices, new double[numLocations],
																							 numLocations, numLocations, false, false, false);
			newWeights[i].plusEqualsSparse (weights[i]);
		}
		weights = newWeights;
	}

	/**
	 * Re-dimensions every weight vector densely: one entry per feature in the
	 * input alphabet, regardless of which features were actually observed.
	 * Existing weight values are copied into the new dense layout.
	 */
	public void setWeightsDimensionDensely ()
	{
		SparseVector[] newWeights = new SparseVector [weights.length];
		int max = inputAlphabet.size();
		int numWeights = 0;
		logger.info ("CRF using dense weights, num input features = "+max);
		for (int i = 0; i < weights.length; i++) {
			// NOTE(review): a SparseVector constructed with null indices appears
			// to act as a dense vector of length max -- confirm against the
			// SparseVector implementation.
			newWeights [i] = new SparseVector (null, new double [max],
																				 max, max, false, false, false);
			newWeights [i].plusEqualsSparse (weights [i]);
			numWeights += max;
		}
		logger.info("Number of weights = "+numWeights);
		weights = newWeights;
	}

	/** Increase the size of the weights[] parameters to match (a new, larger)
			input Alphabet size */
	// No longer needed
	/*
	public void growWeightsDimensionToInputAlphabet ()
	{
		int vs = inputAlphabet.size();
		if (vs == this.defaultFeatureIndex)
			// Doesn't need to grow
			return;
		assert (vs > this.defaultFeatureIndex);
		setTrainable (false);
		for (int i = 0; i < weights.length; i++) {
			DenseVector newWeights = new DenseVector (vs+1);
			newWeights.arrayCopyFrom (0, weights[i]);
			newWeights.setValue (vs, weights[i].value (defaultFeatureIndex));
			newWeights.setValue (defaultFeatureIndex, 0);
			weights[i] = newWeights;
		}
		this.defaultFeatureIndex = vs;
		cachedValueStale = true;
		cachedGradientStale = true;
	}
	*/

	// Create a new weight Vector if weightName is new.
	/**
	 * Looks up (and, if the alphabet is not frozen, creates) the index of the
	 * named weight group, growing the parallel {@code weights},
	 * {@code defaultWeights} and {@code featureSelections} arrays by one slot
	 * when the name is new.
	 *
	 * @param weightName name of the weight group
	 * @return the index of the weight group
	 * @throws IllegalArgumentException if the alphabet is frozen and no weight
	 *         group with this name exists
	 */
	public int getWeightsIndex (String weightName)
	{
		int wi = weightAlphabet.lookupIndex (weightName);
		if (wi == -1)
			throw new IllegalArgumentException ("Alphabet frozen, and no weight with name "+ weightName);
		if (weights == null) {
			// First weight group ever created: allocate the parallel arrays.
			assert (wi == 0);
			weights = new SparseVector[1];
			defaultWeights = new double[1];
			featureSelections = new FeatureSelection[1];
			// Use initial capacity of 8
			weights[0] = new SparseVector ();
			defaultWeights[0] = 0;
			featureSelections[0] = null;
		} else if (wi == weights.length) {
			// New name: grow each parallel array by exactly one slot.
			SparseVector[] newWeights = new SparseVector[weights.length+1];
			double[] newDefaultWeights = new double[weights.length+1];
			FeatureSelection[] newFeatureSelections = new FeatureSelection[weights.length+1];
			for (int i = 0; i < weights.length; i++) {
				newWeights[i] = weights[i];
				newDefaultWeights[i] = defaultWeights[i];
				newFeatureSelections[i] = featureSelections[i];
			}
			newWeights[wi] = new IndexedSparseVector ();
			newDefaultWeights[wi] = 0;
			newFeatureSelections[wi] = null;
			weights = newWeights;
			defaultWeights = newDefaultWeights;
			featureSelections = newFeatureSelections;
		}
		// Any change to the weight structure invalidates training state.
		setTrainable (false);
		return wi;
	}

	/** Returns the number of states in this transducer. */
	public int numStates () { return states.size(); }

	/** Returns the state at the given index. */
	public Transducer.State getState (int index) {
		return (Transducer.State) states.get(index); }

	/** Returns an iterator over the initial states. */
	public Iterator initialStateIterator () {
		return initialStates.iterator (); }

	/** Whether constraint/expectation accumulators are currently allocated. */
	public boolean isTrainable () { return trainable; }

	/**
	 * Allocates (f == true) or frees (f == false) the constraint and
	 * expectation accumulators -- both the per-group defaults and the
	 * zeroed clones of each weight vector -- and propagates the flag to
	 * every state.  A no-op when the flag is unchanged.
	 */
	public void setTrainable (boolean f)
	{
		if (f != trainable) {
			if (f) {
				constraints = new SparseVector[weights.length];
				expectations = new SparseVector[weights.length];
				defaultConstraints = new double[weights.length];
				defaultExpectations = new double[weights.length];
				for (int i = 0; i < weights.length; i++) {
					// Same sparsity pattern as the weights, but all values zero.
					constraints[i] = (SparseVector) weights[i].cloneMatrixZeroed ();
					expectations[i] = (SparseVector) weights[i].cloneMatrixZeroed ();
				}
			} else {
				// Free the accumulators' memory.
				constraints = expectations = null;
				defaultConstraints = defaultExpectations = null;
			}
			for (int i = 0; i < numStates(); i++)
				((State)getState(i)).setTrainable(f);
			trainable = f;
		}
	}

	/**
	 * Returns the L1 norm of all parameters: every state's initial and final
	 * costs plus every default weight and weight-vector absolute norm.
	 */
	public double getParametersAbsNorm ()
	{
		double ret = 0;
		for (int i = 0; i < numStates(); i++) {
			State s = (State) getState (i);
			ret += Math.abs (s.initialCost);
			ret += Math.abs (s.finalCost);
		}
		for (int i = 0; i < weights.length; i++) {
			ret += Math.abs (defaultWeights[i]);
			ret += weights[i].absNorm();
		}
		return ret;
	}

	/** Only sets the parameter from the first group of parameters. */
	/**
	 * Sets one parameter on the transition sourceStateIndex -> destStateIndex.
	 * A negative featureIndex addresses the transition's default weight;
	 * otherwise the weight of that input feature is set.
	 *
	 * @throws IllegalArgumentException if no such transition exists
	 *         (NOTE(review): the message text contains a "transtition" typo,
	 *         left untouched here since it is runtime behavior)
	 */
	public void setParameter (int sourceStateIndex, int destStateIndex, int featureIndex, double value)
	{
		cachedValueStale = cachedGradientStale = true;
		State source = (State)getState(sourceStateIndex);
		State dest = (State) getState(destStateIndex);
		// Find the row of source's transition table that leads to dest.
		int rowIndex;
		for (rowIndex = 0; rowIndex < source.destinationNames.length; rowIndex++)
			if (source.destinationNames[rowIndex].equals (dest.name))
				break;
		if (rowIndex == source.destinationNames.length)
			throw new IllegalArgumentException ("No transtition from state "+sourceStateIndex+" to state "+destStateIndex+".");
		// Only the first weight group of this transition is addressed.
		int weightsIndex = source.weightsIndices[rowIndex][0];
		if (featureIndex < 0)
			defaultWeights[weightsIndex] = value;
		else {
			weights[weightsIndex].setValue (featureIndex, value);
		}
		someTrainingDone = true;
	}

	/** Only gets the parameter from the first group of parameters. */
	/**
	 * Returns one parameter of the transition sourceStateIndex ->
	 * destStateIndex: the default weight when featureIndex is negative,
	 * otherwise that feature's weight.
	 *
	 * NOTE(review): the trailing {@code value} parameter is never read; it
	 * appears to exist only to mirror setParameter's signature -- confirm
	 * before removing.
	 *
	 * @throws IllegalArgumentException if no such transition exists
	 */
	public double getParameter (int sourceStateIndex, int destStateIndex, int featureIndex, double value)
	{
		State source = (State)getState(sourceStateIndex);
		State dest = (State) getState(destStateIndex);
		int rowIndex;
		for (rowIndex = 0; rowIndex < source.destinationNames.length; rowIndex++)
			if (source.destinationNames[rowIndex].equals (dest.name))
				break;
		if (rowIndex == source.destinationNames.length)
			throw new IllegalArgumentException ("No transtition from state "+sourceStateIndex+" to state "+destStateIndex+".");
		int weightsIndex = source.weightsIndices[rowIndex][0];
		if (featureIndex < 0)
			return defaultWeights[weightsIndex];
		else
			return weights[weightsIndex].value (featureIndex);
	}

	/** Unsupported for CRFs; always throws. */
	public void reset ()
	{
		throw new UnsupportedOperationException ("Not used in CRFs");
	}

	/** Unsupported; always throws (after checking trainability). */
	public void estimate ()
	{
		if (!trainable)
			throw new IllegalStateException ("This transducer not currently trainable.");
		// xxx Put stuff in here.
		throw new UnsupportedOperationException ("Not yet implemented.  Never?");
	}

	// yyy
	/**
	 * Prints a human-readable dump to System.out: every state with its
	 * initial/final costs, each outgoing transition, and each transition's
	 * default weight plus all non-zero feature weights (ranked via
	 * RankedFeatureVector).
	 */
	public void print ()
	{
		StringBuffer sb = new StringBuffer();
		for (int i = 0; i < numStates(); i++) {
			State s = (State) getState (i);
			sb.append ("STATE NAME=\"");
			sb.append (s.name);	sb.append ("\" ("); sb.append (s.destinations.length); sb.append (" outgoing transitions)\n");
			sb.append ("  "); sb.append ("initialCost = "); sb.append (s.initialCost); sb.append ('\n');
			sb.append ("  "); sb.append ("finalCost = "); sb.append (s.finalCost); sb.append ('\n');
			for (int j = 0; j < s.destinations.length; j++) {
				sb.append (" -> ");	sb.append (s.getDestinationState(j).getName());
				for (int k = 0; k < s.weightsIndices[j].length; k++) {
					sb.append (" WEIGHTS NAME=\"");
					sb.append (weightAlphabet.lookupObject(s.weightsIndices[j][k]).toString());
					sb.append ("\"\n");
					sb.append ("  ");
					sb.append (s.name); sb.append (" -> "); sb.append (s.destinations[j].name); sb.append (": ");
					sb.append ("<DEFAULT_FEATURE> = "); sb.append (defaultWeights[s.weightsIndices[j][k]]); sb.append('\n');
					SparseVector transitionWeights = weights[s.weightsIndices[j][k]];
					if (transitionWeights.numLocations() == 0)
						continue;
					RankedFeatureVector rfv = new RankedFeatureVector (inputAlphabet, transitionWeights);
					for (int m = 0; m < rfv.numLocations(); m++) {
						double v = rfv.getValueAtRank(m);
						int index = rfv.indexAtLocation (rfv.getIndexAtRank (m));
						Object feature = inputAlphabet.lookupObject (index);
						// Skip zero-valued weights in the dump.
						if (v != 0) {
							sb.append ("  ");
							sb.append (s.name); sb.append (" -> "); sb.append (s.destinations[j].name); sb.append (": ");
							sb.append (feature); sb.append (" = "); sb.append (v); sb.append ('\n');
						}
					}
				}
			}
		}
		System.out.println (sb.toString());
	}

	// Java question:
	// If I make a non-static inner class CRF.Trainer,
	// can that class by subclassed in another .java file,
	// and can that subclass still have access to all the CRF's
	// instance variables?
	/** Trains on ilist with no validation or testing sets. */
	public boolean train (InstanceList ilist)
	{
		return train (ilist, (InstanceList)null, (InstanceList)null);
	}

	/** Trains on ilist, evaluating on the given validation and testing sets, with no evaluator. */
	public boolean train (InstanceList ilist, InstanceList validation, InstanceList testing)
	{
		return train (ilist, validation, testing, (TransducerEvaluator)null);
	}

	/** Trains with an effectively unbounded iteration limit (9999). */
	public boolean train (InstanceList ilist, InstanceList validation, InstanceList testing,
										 TransducerEvaluator eval)
	{
		return train (ilist, validation, testing, eval, 9999);
	}

	/**
	 * Trains the CRF by L-BFGS for up to numIterations iterations.
	 * First sizes the weights (sparsely from ilist, or densely, depending on
	 * useSparseWeights), then repeatedly runs one maximizer step, optionally
	 * letting {@code eval} observe progress and stop training early.
	 *
	 * NOTE(review): this method is truncated at the end of this page of the
	 * listing; its remainder (and return) continues on the next page.
	 */
	public boolean train (InstanceList ilist, InstanceList validation, InstanceList testing,
												TransducerEvaluator eval, int numIterations)
	{
		if (numIterations <= 0)
			return false;
		assert (ilist.size() > 0);
		// Choose the weight layout before creating the maximizable wrapper.
		if (useSparseWeights) {
			setWeightsDimensionAsIn (ilist);
		} else {
			setWeightsDimensionDensely ();
		}
		MaximizableCRF mc = new MaximizableCRF (ilist, this);
		//Maximizer.ByGradient minimizer = new ConjugateGradient (0.001);
		Maximizer.ByGradient maximizer = new LimitedMemoryBFGS();
		int i;
		boolean continueTraining = true;
		boolean converged = false;
		logger.info ("CRF about to train with "+numIterations+" iterations");
		for (i = 0; i < numIterations; i++) {
			try {
				// One L-BFGS step per loop iteration.
				converged = maximizer.maximize (mc, 1);
				logger.info ("CRF finished one iteration of maximizer, i="+i);
			} catch (IllegalArgumentException e) {
				// The maximizer can throw on a bad line search; treat as converged
				// rather than aborting training.
				e.printStackTrace();
				logger.info ("Catching exception; saying converged.");
				converged = true;
			}
			if (eval != null) {
				continueTraining = eval.evaluate (this, (converged || i == numIterations-1), i,
																					converged, mc.getValue(), ilist, validation, testing);
				if (!continueTraining)
					break;
			}
			if (converged) {
				logger.info ("CRF training has converged, i="+i);
				break;
			}
		}
		logger.info ("About to setTrainable(false)");
		// Free the memory of the expectations and constraints
		setTrainable (false);
		logger.info ("Done setTrainable(false)");
		// (listing truncated here; method body continues on the next page)

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -