
📄 crf4.java

📁 Common machine learning algorithms, with Java source code; includes common classification algorithms and accompanying documentation
💻 JAVA
📖 Page 1 of 5
			out.writeInt(NULL_INTEGER);
		}

		if (weightsPresent != null) {
			size = weightsPresent.length;
			out.writeInt(size);
			for (i = 0; i < size; i++)
				out.writeObject(weightsPresent[i]);
		} else {
			out.writeInt(NULL_INTEGER);
		}
		if (featureSelections != null) {
			size = featureSelections.length;
			out.writeInt(size);
			for (i = 0; i < size; i++)
				out.writeObject(featureSelections[i]);
		} else {
			out.writeInt(NULL_INTEGER);
		}

		out.writeObject(globalFeatureSelection);
		out.writeObject(weightAlphabet);
		out.writeBoolean(trainable);
		out.writeBoolean(gatheringConstraints);
		out.writeBoolean(gatheringWeightsPresent);
		//out.writeInt(defaultFeatureIndex);
		out.writeBoolean(usingHyperbolicPrior);
		out.writeDouble(gaussianPriorVariance);
		out.writeDouble(hyperbolicPriorSlope);
		out.writeDouble(hyperbolicPriorSharpness);
		out.writeBoolean(cachedValueStale);
		out.writeBoolean(cachedGradientStale);
		out.writeBoolean(someTrainingDone);
		out.writeInt(featureInducers.size());
		for (i = 0; i < featureInducers.size(); i++) {
			out.writeObject(featureInducers.get(i));
		}
		out.writeBoolean(printGradient);
		out.writeBoolean (useSparseWeights);
	}

	private void readObject (ObjectInputStream in) throws IOException, ClassNotFoundException {
		int size, i;
		int version = in.readInt ();
		inputPipe = (Pipe) in.readObject();
		outputPipe = (Pipe) in.readObject();
		inputAlphabet = (Alphabet) in.readObject();
		outputAlphabet = (Alphabet) in.readObject();
		size = in.readInt();
		states = new ArrayList();
		for (i = 0; i < size; i++) {
			State s = (CRF4.State) in.readObject();
			states.add(s);
		}
		size = in.readInt();
		initialStates = new ArrayList();
		for (i = 0; i < size; i++) {
			State s = (CRF4.State) in.readObject();
			initialStates.add(s);
		}
		name2state = (HashMap) in.readObject();
		size = in.readInt();
		if (size == NULL_INTEGER) {
			weights = null;
		} else {
			weights = new SparseVector[size];
			for (i = 0; i < size; i++) {
				weights[i] = (SparseVector) in.readObject();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			constraints = null;
		} else {
			constraints = new SparseVector[size];
			for (i = 0; i < size; i++) {
				constraints[i] = (SparseVector) in.readObject();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			expectations = null;
		} else {
			expectations = new SparseVector[size];
			for (i = 0; i < size; i++) {
				expectations[i] = (SparseVector) in.readObject();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			defaultWeights = null;
		} else {
			defaultWeights = new double[size];
			for (i = 0; i < size; i++) {
				defaultWeights[i] = in.readDouble();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			defaultConstraints = null;
		} else {
			defaultConstraints = new double[size];
			for (i = 0; i < size; i++) {
				defaultConstraints[i] = in.readDouble();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			defaultExpectations = null;
		} else {
			defaultExpectations = new double[size];
			for (i = 0; i < size; i++) {
				defaultExpectations[i] = in.readDouble();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			weightsPresent = null;
		} else {
			weightsPresent = new BitSet[size];
			for (i = 0; i < size; i++)
				weightsPresent[i] = (BitSet) in.readObject();
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			featureSelections = null;
		} else {
			featureSelections = new FeatureSelection[size];
			for (i = 0; i < size; i++)
				featureSelections[i] = (FeatureSelection) in.readObject();
		}

		globalFeatureSelection = (FeatureSelection) in.readObject();
		weightAlphabet = (Alphabet) in.readObject();
		trainable = in.readBoolean();
		gatheringConstraints = in.readBoolean();
		gatheringWeightsPresent = in.readBoolean();
		//defaultFeatureIndex = in.readInt();
		usingHyperbolicPrior = in.readBoolean();
		gaussianPriorVariance = in.readDouble();
		hyperbolicPriorSlope = in.readDouble();
		hyperbolicPriorSharpness = in.readDouble();
		cachedValueStale = in.readBoolean();
		cachedGradientStale = in.readBoolean();
		someTrainingDone = in.readBoolean();
		size = in.readInt();
		featureInducers = new ArrayList();
		for (i = 0; i < size; i++) {
			featureInducers.add((FeatureInducer) in.readObject());
		}
		printGradient = in.readBoolean();
		if (version > 1) {
			useSparseWeights = in.readBoolean();
		} else {
			useSparseWeights = false;
		}
	}

	public class MaximizableCRF implements Maximizable.ByGradient, Serializable
	{
		InstanceList trainingSet;
		double cachedValue = -123456789;
		DenseVector cachedGradient;
		BitSet infiniteValues = null;
		int numParameters;
		CRF4 crf;

		protected MaximizableCRF (InstanceList ilist, CRF4 crf)
		{
			// Set up
			this.numParameters = 2 * numStates() + defaultWeights.length;
			for (int i = 0; i < weights.length; i++)
				numParameters += weights[i].numLocations();
			this.trainingSet = ilist;
			this.crf = crf;
			cachedGradient = new DenseVector (numParameters);
			// This resets any values that may have been in expectations and constraints
			setTrainable (true);
			// Set the constraints by running forward-backward with the *output
			// label sequence provided*, thus restricting it to only those
			// paths that agree with the label sequence.
			gatheringConstraints = true;
			for (int i = 0; i < ilist.size(); i++) {
				Instance instance = ilist.getInstance(i);
				FeatureVectorSequence input = (FeatureVectorSequence) instance.getData();
				FeatureSequence output = (FeatureSequence) instance.getTarget();
				//System.out.println ("Confidence-gathering forward-backward on instance "+i+" of "+ilist.size());
				this.crf.forwardBackward (input, output, true);
				//System.out.println ("Gathering constraints for Instance #"+i);
			}
			gatheringConstraints = false;
			//System.out.println ("testing Value and Gradient");
			//TestMaximizable.testValueAndGradientCurrentParameters (this);
		}

		public Matrix getNewMatrix () { return new DenseVector (numParameters); }

		// Negate initialCost and finalCost because the parameters are in
		// terms of "weights", not "values".
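		// Layout used by the accessors below: the flat parameter vector packs, for every
		// state, its negated initialCost and finalCost (two entries per state), followed,
		// for each weight set i, by defaultWeights[i] and then the value stored at each
		// location of the sparse vector weights[i]. This matches the count computed for
		// numParameters in the constructor above.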
		public int getNumParameters () { return this.numParameters; }

		public void getParameters (double[] buffer)
		{
			if (buffer.length != getNumParameters ())
				buffer = new double [getNumParameters()];
			DenseVector parameters = new DenseVector (buffer, true);
			int pi = 0;
			for (int i = 0; i < numStates(); i++) {
				State s = (State) getState (i);
				parameters.setValue (pi++, -s.initialCost);
				parameters.setValue (pi++, -s.finalCost);
			}
			for (int i = 0; i < weights.length; i++) {
				parameters.setValue (pi++, defaultWeights[i]);
				int nl = weights[i].numLocations();
				for (int j = 0; j < nl; j++)
					parameters.setValue (pi++, weights[i].valueAtLocation(j));
			}
			parameters.arrayCopyTo (0, buffer);
		}

		public double getParameter (int index) {
			int numStateParms = 2 * numStates();
			if (index < numStateParms) {
				State s = (State) getState(index / 2);
				if (index % 2 == 0)
					return -s.initialCost;
				else
					return -s.finalCost;
			} else {
				index -= numStateParms;
				for (int i = 0; i < weights.length; i++) {
					if (index == 0)
						return defaultWeights[i];
					index--;
					if (index < weights[i].numLocations())
						return weights[i].valueAtLocation (index);
					else
						index -= weights[i].numLocations();
				}
				throw new IllegalArgumentException ("index too high = " + index);
			}
		}

		public void setParameters (double[] buff) {
			assert (buff.length == getNumParameters());
			cachedValueStale = cachedGradientStale = true;
			DenseVector parameters = new DenseVector (buff, true);
			int pi = 0;
			for (int i = 0; i < numStates(); i++) {
				State s = (State) getState (i);
				s.initialCost = -parameters.value (pi++);
				s.finalCost = -parameters.value (pi++);
			}
			for (int i = 0; i < weights.length; i++) {
				defaultWeights[i] = parameters.value (pi++);
				int nl = weights[i].numLocations();
				for (int j = 0; j < nl; j++)
					weights[i].setValueAtLocation (j, parameters.value (pi++));
			}
			someTrainingDone = true;
		}

		public void setParameter (int index, double value) {
			cachedValueStale = cachedGradientStale = true;
			int numStateParms = 2 * numStates();
			if (index < numStateParms) {
				State s = (State) getState(index / 2);
				if (index % 2 == 0)
					s.initialCost = -value;
				else
					s.finalCost = -value;
			} else {
				index -= numStateParms;
				for (int i = 0; i < weights.length; i++) {
					if (index == 0) {
						defaultWeights[i] = value;
						return;
					} else
						index--;
					if (index < weights[i].numLocations()) {
						weights[i].setValueAtLocation (index, value);
						return;
					} else
						index -= weights[i].numLocations();
				}
				throw new IllegalArgumentException ("index too high = " + index);
			}
		}

		// log probability of the training sequence labels
		public double getValue ()
		{
			if (cachedValueStale) {
				long startingTime = System.currentTimeMillis();
				cachedValue = 0;
				cachedGradientStale = true;
				// Instance values must either always or never be included in
				// the total values; we can't just sometimes skip a value
				// because it is infinite, as this throws off the total values.
				boolean initializingInfiniteValues = false;
				if (infiniteValues == null) {
					infiniteValues = new BitSet ();
					initializingInfiniteValues = true;
				}
				// Clear the sufficient statistics that we are about to fill
				for (int i = 0; i < numStates(); i++) {
					State s = (State) getState(i);
					s.initialExpectation = 0;
					s.finalExpectation = 0;
				}
				for (int i = 0; i < weights.length; i++) {
					expectations[i].setAll (0.0);
					defaultExpectations[i] = 0.0;
				}
				// Calculate the value of each instance, and also fill in expectations
				double unlabeledCost, labeledCost, cost;
				for (int ii = 0; ii < trainingSet.size(); ii++) {
					Instance instance = trainingSet.getInstance(ii);
					FeatureVectorSequence input = (FeatureVectorSequence) instance.getData();
					FeatureSequence output = (FeatureSequence) instance.getTarget();
					labeledCost = forwardBackward (input, output, false).getCost();
					//System.out.println ("labeledCost = "+labeledCost);
					if (Double.isInfinite (labeledCost))
						logger.warning (instance.getName().toString() + " has infinite labeled cost.\n"
														+ (instance.getSource() != null ? instance.getSource() : ""));
					unlabeledCost = forwardBackward (input, true).getCost ();
					//System.out.println ("unlabeledCost = "+unlabeledCost);
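The excerpt breaks off inside getValue(); the remainder of the method continues on the following pages of this listing. As context for the serialization code at the top of the page: writeObject and readObject are Java's custom-serialization hooks, so a trained model of this class would normally be saved and restored through the standard object-stream machinery. The short sketch below is illustrative only; the class name, helper methods, and file handling are not part of crf4.java, and it assumes the enclosing CRF4 class implements java.io.Serializable, which the presence of these hooks suggests.

import java.io.*;

// Illustrative helper, not part of crf4.java: persisting and restoring a model via
// ObjectOutputStream/ObjectInputStream, which is what triggers the writeObject/readObject
// methods shown above. Names and paths here are hypothetical.
public class Crf4SerializationSketch {

	// Serialize a (Serializable) model object to disk.
	public static void save (Object crf, File file) throws IOException {
		try (ObjectOutputStream out = new ObjectOutputStream (new FileOutputStream (file))) {
			out.writeObject (crf);      // invokes the custom writeObject shown above
		}
	}

	// Read the model back; the caller would cast the result to CRF4.
	public static Object load (File file) throws IOException, ClassNotFoundException {
		try (ObjectInputStream in = new ObjectInputStream (new FileInputStream (file))) {
			return in.readObject ();    // invokes the custom readObject shown above
		}
	}
}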
