
outputLayer.java

JaNet: Java Neural Network Toolkit. Summary: a well documented toolkit for designing and training neural networks.
//////////////////////////////////////////////////////////////////////////////////
//
//  Copyright (C) 1996 L. Patocchi & W.Gander
//
//  This program is free software; you can redistribute it and/or modify it
//  under the terms of the GNU General Public License as published by the Free
//  Software Foundation; either version 2 of the License, or (at your option)
//  any later version.
//
//  This program is distributed in the hope that it will be useful, but WITHOUT
//  ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
//  FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
//  more details.
//
//  You should have received a copy of the GNU General Public License along with
//  this program; if not, write to the Free Software Foundation, Inc., 675 Mass
//  Ave, Cambridge, MA 02139, USA.
//
//  Contacts:
//
//    Project Supervisor
//      W.Hett       hew@info.isbiel.ch
//
//    Authors
//      W.Gander     gandw@info.isbiel.ch
//      L.Patocchi   patol@info.isbiel.ch
//
//  Documentation can be found at:
//
//      http://www.isbiel.ch/Projects/janet/index.html
//
//////////////////////////////////////////////////////////////////////////////////
//
//  File : outputLayer.java
//
//////////////////////////////////////////////////////////////////////
//
//  outputLayer extending class
//
//////////////////////////////////////////////////////////////////////
//  Author:  Patocchi L.
//  Date:    02.09.1996
//  Project: jaNet
//
//  outputLayer is the extended BPNLayer class for a BackPropagation
//  output layer.
//
//////////////////////////////////////////////////////////////////////
//  date        who            what
//  02.09.1996  Patocchi L.    creation

package jaNet.backprop;

import java.io.*;

public class outputLayer extends BPNLayer
{
    // these 3 layer-length arrays keep internal values for each unit
    protected double sum[];      // sum of x(i)*w(i,j) for unit j in this layer
    protected double delta[];    // delta or error for each unit in this layer
    protected double bias[];     // bias for each unit in this layer

    // since this is an output layer we have only one upper layer and one weight pack in between
    protected BPNLayer      upperLayer;
    protected BPNWeightPack upperWeights;

    // the activation function of this layer, named by fnClassName, is referenced by aFn
    protected activationFn aFn;
    protected String       fnClassName;

    // neural net functional variables
    protected double learningRate;
    protected double momentum;
    protected double error;

    //////////////////////////////////////////////////////////////////
    //  Constructors
    //////////////////////////////////////////////////////////////////

    public outputLayer(){
        this(1);
    }

    public outputLayer(int n){
        setSize(n);
    }

    //////////////////////////////////////////////////////////////////
    //  Initialisers
    //////////////////////////////////////////////////////////////////

    public void reset(){
        reset(0.0);
    }

    public void reset(double x){
        for(int i=0; i<vector.length; i++){
            sum[i]   = x;
            delta[i] = x;
        }
    }

    //////////////////////////////////////////////////////////////////
    //  Parameter providers
    //////////////////////////////////////////////////////////////////

    public double[] getDelta(){
        double newDelta[] = new double[vector.length];
        for(int i=0; i<vector.length; i++) newDelta[i] = delta[i];
        return newDelta;
    }

    public double[] getBias(){
        double newBias[] = new double[vector.length];
        for(int i=0; i<vector.length; i++) newBias[i] = bias[i];
        return newBias;
    }

    public double getDelta(int x) throws BPNException{
        if(x<0 || x>=vector.length) throw new BPNException("outputLayer: Error in getDelta(x), <x> under/overflows layer size.");
        return delta[x];
    }

    public double getBias(int x) throws BPNException{
        if(x<0 || x>=vector.length) throw new BPNException("outputLayer: Error in getBias(x), <x> under/overflows layer size.");
        return bias[x];
    }

    public double[] getDelta(int From, int To) throws BPNException{
        if(From > To) throw new BPNException("outputLayer: Error in getDelta(From, To), <From> greater than <To>.");
        if(From<0 || To>=vector.length) throw new BPNException("outputLayer: Error in getDelta(From, To), <From> or <To> under/overflows layer size.");
        double newDelta[] = new double[To-From+1];
        for(int i=From; i<=To; i++) newDelta[i-From] = delta[i];
        return newDelta;
    }

    public double[] getBias(int From, int To) throws BPNException{
        if(From > To) throw new BPNException("outputLayer: Error in getBias(From, To), <From> greater than <To>.");
        if(From<0 || To>=vector.length) throw new BPNException("outputLayer: Error in getBias(From, To), <From> or <To> under/overflows layer size.");
        double newBias[] = new double[To-From+1];
        for(int i=From; i<=To; i++) newBias[i-From] = bias[i];
        return newBias;
    }

    public double getLearningRate(){
        return learningRate;
    }

    public double getMomentum(){
        return momentum;
    }

    public double getError(){
        return error;
    }

    public double getError(double vec[], double target[]) throws BPNException{
        if(target == null || vec == null)
            throw new BPNException("outputLayer: Error in getError, vector or target is null.");
        if(target.length != vector.length)
            throw new BPNException("outputLayer: Error in getError, target length doesn't match layer length.");

        // search for the input layer so the current pattern can be propagated down
        BPNLayer tempLayer = upperLayer;
        while(tempLayer.getLayerKind() != BPNLayer.INPUT){
            tempLayer = tempLayer.getUpperLayer();
            if(tempLayer == null)
                throw new BPNException("GENERAL PROGRAM ERROR in outputLayer.getError, searching InputLayer: Please contact authors, Thank-You");
        }

        if(vec.length != tempLayer.getSize())
            throw new BPNException("outputLayer: Error in getError, vector length ("+vec.length+") doesn't match input layer length ("+tempLayer.getSize()+").");

        // found, then propagate the input vector
        tempLayer.propagate(vec);

        // calculate the error for this layer: half the summed squared difference
        error = 0.0;
        for(int i=0; i<vector.length; i++)
            error += (target[i] - vector[i]) * (target[i] - vector[i]);
        error *= 0.5;
        return error;
    }

    //////////////////////////////////////////////////////////////////
    //  Parameter modifiers
    //////////////////////////////////////////////////////////////////

    public void setSize(int size){
        super.setSize(size);
        delta = new double[size];
        sum   = new double[size];
        bias  = new double[size];
        reset();
    }

    public void setBias(double[] b) throws BPNException{
        if(b == null) throw new BPNException("outputLayer: Error in setBias(vector), <vector> is null.");
        if(b.length != vector.length) throw new BPNException("outputLayer: Error in setBias(vector), <vector> length doesn't match layer length.");
        for(int i=0; i<b.length; i++) bias[i] = b[i];
    }

    public void setBias(double b){
        for(int i=0; i<bias.length; i++) bias[i] = b;
    }

    public void setBias(double v, int at) throws BPNException{
        double x[] = new double[1];
        x[0] = v;
        setBias(x, at);
    }

    public void setBias(double[] v, int at) throws BPNException{
        if(v == null) throw new BPNException("outputLayer: Error in setBias(vector, at), <vector> is null.");
        if(at < 0) throw new BPNException("outputLayer: Error in setBias(vector, at), <at> is negative.");
        if(v.length+at > vector.length) throw new BPNException("outputLayer: Error in setBias(vector, at), <at> + <vector> length overflows layer length.");

        // insert the array 'v' at the given position 'at'
        for(int i=at; i<v.length+at; i++) bias[i] = v[i-at];
    }

    public void setLearningRate(double val){
        learningRate = val;
    }

    public void setMomentum(double val){
        momentum = val;
    }

    //////////////////////////////////////////////////////////////////
    //  Plug-ins (activation function, layers, weight pack)
    //////////////////////////////////////////////////////////////////

    public void setActivationFnClass(String fnclassname) throws BPNException{
        activationFn aFnTemp;
        try{
            // try to load and instantiate the class whose name is given in fnclassname
            aFnTemp = (activationFn) Class.forName(fnclassname).newInstance();
        }catch(InstantiationException instex){
            throw new BPNException("outputLayer: Error instantiating activation fn class ("+instex+").");
        }catch(IllegalAccessException illaccex){
            throw new BPNException("outputLayer: Error accessing activation fn class ("+illaccex+").");
        }catch(ClassNotFoundException classnotfound){
            throw new BPNException("outputLayer: Error, activation fn class '"+fnclassname+"' not found ("+classnotfound+").");
        }
        fnClassName = fnclassname;
        aFn = aFnTemp;
    }

    public String getActivationFnClassName(){
        return fnClassName;
    }

    public void setUpperLayer(BPNLayer layer){
        upperLayer = layer;
    }

    public BPNLayer getUpperLayer(){
        return upperLayer;
    }

    public void setUpperWeightPack(BPNWeightPack uwp){
        upperWeights = uwp;
    }

    public BPNWeightPack getUpperWeightPack(){
        return upperWeights;
    }

    //////////////////////////////////////////////////////////////////
    //  I/O ports
    //////////////////////////////////////////////////////////////////

    public void writeToFile(RandomAccessFile raf) throws BPNException{
        try{
            raf.writeUTF(getClass().getName()+version);
            int size = getSize();
            raf.writeInt(size);
            for(int i=0; i<size; i++)
                raf.writeDouble(bias[i]);
            raf.writeUTF(fnClassName);
        }catch(IOException ioe){
            throw new BPNException("outputLayer: Error in writeToFile ("+ioe+").");
        }
    }

    public static outputLayer readFromFile(RandomAccessFile raf) throws BPNException{
        outputLayer temp = new outputLayer();

        try{
            if(raf.readUTF().compareTo(temp.getClass().getName()+version) != 0){
                throw new BPNException("outputLayer: Error in readFromFile, unknown version.");
            }
            // set up table sizes in temporary variables
            int size = raf.readInt();
            temp = new outputLayer(size);
            double newBias[] = new double[size];

            // read content and store the values
            for(int i=0; i<size; i++)
                newBias[i] = raf.readDouble();

            temp.setBias(newBias);
            temp.setActivationFnClass(raf.readUTF());
        }catch(IOException ioe){
            throw new BPNException("outputLayer: Error in readFromFile ("+ioe+").");
        }

        return temp;
    }

    //////////////////////////////////////////////////////////////////
    //  Neural network output layer functionalities
    //////////////////////////////////////////////////////////////////

    public void propagate() throws BPNException{
        if(aFn == null)
            throw new BPNException("outputLayer: Error in ("+this+"), activation Fn is not defined.");

        // weights connecting one lower neuron to all upper neurons
        double weightsToUpper[] = null;
        // get the upper layer values
        double upperVector[] = upperLayer.getVector();

        // it may be possible to parallelize this loop in the future
        for(int i=0; i<vector.length; i++){
            // get the weights that connect neuron i to the upper neurons
            try{
                weightsToUpper = upperWeights.getLowerNeuronConnectedWeights(i);
            }catch(BPNException bpne){
                throw new BPNException("GENERAL PROGRAM ERROR in outputLayer.propagate: Please contact authors, Thank-You.\n("+bpne+")");
            }
            sum[i] = 0.0;
            // sum all upper unit outputs times the weights connecting them to this unit
            for(int j=0; j<upperVector.length; j++){
                sum[i] += upperVector[j] * weightsToUpper[j];
            }
            // update this neuron's output value
            vector[i] = aFn.activation(sum[i]) + bias[i];
        }
    }

    public void learn(double input[], double target[]) throws BPNException{
        double upperVector[] = null;
        double weightsToUpper[] = null;
        double deltaWeightsToUpper[] = null;

        if(input == null || target == null)
            throw new BPNException("outputLayer: Error in learn, input or target is null.");

        // before learning changes any weight, search for the input layer
        // and propagate the current pattern through the network
        BPNLayer tempLayer = upperLayer;
        while(tempLayer.getLayerKind() != BPNLayer.INPUT){
            tempLayer = tempLayer.getUpperLayer();
            if(tempLayer == null)
                throw new BPNException("GENERAL PROGRAM ERROR in outputLayer.learn, searching InputLayer: Please contact authors, Thank-You");
        }

        if(target.length != vector.length)
            throw new BPNException("outputLayer: Error in learn, target length doesn't match layer length.");

        // found, then propagate the input vector
        tempLayer.propagate(input);

        // update the delta values and compute the error for this layer
        error = 0.0;
        for(int i=0; i<vector.length; i++){
            delta[i] = (target[i] - vector[i]) * aFn.activation1stD(sum[i]);
            error += (target[i] - vector[i]) * (target[i] - vector[i]);
        }
        error *= 0.5;

        // get the upperLayer vector
        upperVector = upperLayer.getVector();

        // modify outputLayer upperWeights; this 'for' statement is highly parallelisable
        for(int i=0; i<vector.length; i++){
            // get the weights that connect neuron i to the upperLayer
            try{
                weightsToUpper = upperWeights.getLowerNeuronConnectedWeights(i);
                deltaWeightsToUpper = upperWeights.getLowerNeuronConnectedDeltas(i);
            }catch(BPNException bpne){
                throw new BPNException("GENERAL PROGRAM ERROR in outputLayer.learn, getting weightsToUpper: Please contact authors, Thank-You.\n("+bpne+")");
            }
            // update the weight values: the previous change times momentum, plus
            // the learning rate times the upper unit's output times this unit's delta
            for(int j=0; j<upperVector.length; j++){
                double deltaW = deltaWeightsToUpper[j] * momentum + learningRate * upperVector[j] * delta[i];
                weightsToUpper[j] += deltaW;
            }
            // store the updated weights back in upperWeights
            upperWeights.setLowerNeuronConnectedWeights(i, weightsToUpper);
        }

        // all weights updated, now feed the learning back to the upperLayer
        upperLayer.learn();
    }
}
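
The layer loads its activation function reflectively: setActivationFnClass takes a class name, instantiates it via Class.forName(...).newInstance(), and casts it to activationFn. The interface itself is not shown in this file; judging from the calls above, it declares activation(double) and its first derivative activation1stD(double). Below is a minimal sketch of a plug-in sigmoid under those assumptions; the class name sigmoidFn is hypothetical, not part of jaNet.

package jaNet.backprop;

// Hypothetical plug-in: implements the activationFn interface inferred from
// the calls aFn.activation(sum[i]) and aFn.activation1stD(sum[i]) above.
public class sigmoidFn implements activationFn {

    // standard logistic sigmoid, a common choice for backpropagation layers
    public double activation(double x) {
        return 1.0 / (1.0 + Math.exp(-x));
    }

    // first derivative of the sigmoid, expressed through the function value
    public double activation1stD(double x) {
        double s = activation(x);
        return s * (1.0 - s);
    }
}

Because the class is instantiated reflectively, any such plug-in needs a public no-argument constructor; it would then be selected with layer.setActivationFnClass("jaNet.backprop.sigmoidFn").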
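
The heart of learn() is the generalized delta rule: for output unit i, delta_i = (target_i - output_i) * f'(sum_i); each incoming weight then moves by deltaW = previousChange * momentum + learningRate * upperOutput_j * delta_i, and the reported error is half the summed squared difference. The following self-contained sketch traces one such update with made-up numbers, using a sigmoid derivative in place of activation1stD; it is purely illustrative, not jaNet code.

// Standalone illustration of the update rule implemented in learn() above.
public class DeltaRuleDemo {
    public static void main(String[] args) {
        double learningRate = 0.25, momentum = 0.9;
        double[] upper  = {0.3, 0.7};   // outputs of the layer above
        double[] target = {1.0};        // desired output of this unit
        double[] out    = {0.6};        // current output of this unit
        double sum      = 0.5;          // weighted input of this unit
        double[] w      = {0.1, -0.2};  // weights from upper units to this unit
        double[] dwPrev = {0.0, 0.0};   // previous weight changes (momentum term)

        // delta_i = (target - output) * f'(sum), here with a sigmoid derivative
        double s = 1.0 / (1.0 + Math.exp(-sum));
        double delta = (target[0] - out[0]) * s * (1.0 - s);

        // weight update, mirroring the inner loop of learn():
        // deltaW = previous change * momentum + learningRate * upper output * delta
        for (int j = 0; j < w.length; j++) {
            double deltaW = dwPrev[j] * momentum + learningRate * upper[j] * delta;
            w[j] += deltaW;
            System.out.println("w[" + j + "] -> " + w[j]);
        }

        // the error the layer reports: half the summed squared difference
        double error = 0.5 * (target[0] - out[0]) * (target[0] - out[0]);
        System.out.println("error = " + error);
    }
}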
