
📄 neuralnetwork.java

📁 A neural-network toolkit implemented in Java
💻 JAVA
📖 Page 1 of 2
/*
 * NeuralNetwork.java    1.0 09 Jun 2004
 *
 * NeuralNetworkToolkit
 * Copyright (C) 2004 Universidade de Brasília
 *
 * This file is part of NeuralNetworkToolkit.
 *
 * NeuralNetworkToolkit is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * NeuralNetworkToolkit is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with NeuralNetworkToolkit; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA - 02111-1307 - USA.
 */
package neuralnetworktoolkit.neuralnetwork;

import java.util.Vector;
import java.io.Serializable;

import neuralnetworktoolkit.activationfunctions.*;
import neuralnetworktoolkit.neuralnetwork.weightinitialization.WeightInitialization;

/**
 * Class that implements some basic features of a neural network.
 *
 * @version 1.0 09 Jun 2004
 *
 * @author <a href="mailto:hugoiver@yahoo.com.br">Hugo Iver V. Gonçalves</a>
 * @author <a href="mailto:rodbra@pop.com.br">Rodrigo C. M. Coimbra</a>
 */
public class NeuralNetwork implements INeuralNetwork, Serializable {

    public static final int MINIMUM_SIZE         = 15;
    public static final int DYNAMIC              = 0;
    public static final int NOT_DYNAMIC          = 1;
    public static final int MULTICONEXED         = 0;
    public static final int NOT_MULTICONEXED     = 1;
    public static final int RECURRENT            = 0;
    public static final int NOT_RECURRENT        = 1;
    public static final int AUTO_HIDDEN          = 0;
    public static final int NOT_AUTO_HIDDEN      = 1;
    public static final int NUMERICAL_INPUTS     = 0;
    public static final int NOT_NUMERICAL_INPUTS = 1;
    public static final int NOMINAL_INPUTS       = 0;
    public static final int NOT_NOMINAL_INPUTS   = 1;
    public static final int MIXED_INPUTS         = 0;
    public static final int NOT_MIXED_INPUTS     = 1;
    public static final int MULTIPLE_AF          = 0;
    public static final int NOT_MULTIPLE_AF      = 1;

    private int            isMultiConexed;
    private int            isDynamic;
    private int            isRecurrent;
    private int            isAutoHidden;
    private int            isNeuronNormalizer;
    private int            isNumericalInputs;
    private int            isNominalInputs;
    private int            isMixedInputs;
    private int            isMultipleActivationFunctions;
    private Vector         dynamicLayers;
    private ILayer[]       staticLayers;
    private Vector         dynamicInputValues;
    private double[]       staticInputValues;
    private double[]       staticResultValues;
    private IFunctionCache functionCache;
    private int            networkSize            = 0;
    private int            inputSize              = 0;
    private int            outputSize             = 0;
    private int            numberOfInternalLayers = 0;
    private double         error;

    /**
     * Creates a neural network with default attributes.
     */
    public NeuralNetwork() {
        this.isDynamic = NOT_DYNAMIC;
        this.isMultiConexed = MULTICONEXED;
        this.isRecurrent = NOT_RECURRENT;
        this.isAutoHidden = AUTO_HIDDEN;
        //this.isNeuronNormalizer = NOT_NEURON_NORMALIZER;
        this.isNumericalInputs = NUMERICAL_INPUTS;
        this.isNominalInputs = NOT_NOMINAL_INPUTS;
        this.isMixedInputs = NOT_MIXED_INPUTS;
        this.isMultipleActivationFunctions = NOT_MULTIPLE_AF;
        this.functionCache = new FunctionCache();
    } //NeuralNetwork()

    /**
     * Creates a neural network with default attributes for a specific
     * input layer size. <br><b>Implementation not completed! Do not use this!</b>
     *
     * @param inputSize Input layer size.
     */
    public NeuralNetwork(int inputSize) {
        this();
        // TODO Finish this implementation.
    } //NeuralNetwork()

    /**
     * <b>Not implemented! Do not use this!</b>
     */
    public NeuralNetwork(String configuration, int size) {
        // TODO Implement this.
    } //NeuralNetwork()

    /**
     * Creates a neural network with default attributes for the specified
     * input layer size, number of internal layers, output layer size,
     * internal layer size, indication of a normalizer layer, weight
     * initialization strategy and array of activation functions (one for
     * each computational layer).
     *
     * @param inputSize      Input layer size.
     * @param internalSize   Number of internal layers.
     * @param outputSize     Output layer size.
     * @param layerSize      Internal layers size.
     * @param normalizer     Indication of a normalizer layer.
     * @param initialization Weight initialization strategy.
     * @param functionName   Activation functions array.
     */
    public NeuralNetwork(int inputSize,
                         int internalSize,
                         int outputSize,
                         int layerSize,
                         int normalizer,
                         WeightInitialization initialization,
                         String[] functionName) {

        this();
        this.isNeuronNormalizer = normalizer;
        this.inputSize = inputSize;
        this.outputSize = outputSize;
        for (int i = 0; i < functionName.length; i++) {
            functionCache.addActivationFunction(functionName[i]);
        }

        int internalLayerSize = layerSize;

        this.numberOfInternalLayers = internalSize;
        this.staticResultValues = new double[outputSize];
        int control = 0;

        networkSize = internalSize + 1;
        this.staticLayers = new ILayer[networkSize];

        // Build the internal layers from the last one down to the first;
        // the first internal layer receives the network input.
        int numPesos = internalLayerSize;
        for (int i = (networkSize - 2); i >= control; i--) {
            if (i == control) {
                numPesos = inputSize;
            }
            staticLayers[i] =
                new Layer(
                    numPesos,
                    internalLayerSize,
                    NOT_DYNAMIC,
                    functionCache.getActivationFunction(functionName[i]));
        }
        staticLayers[networkSize - 1] =
            new Layer(
                internalLayerSize,
                outputSize,
                NOT_DYNAMIC,
                functionCache.getActivationFunction(functionName[networkSize - 1]));

        initialization.initialize(this);
    } //NeuralNetwork()

    /**
     * Creates a neural network with default attributes for the specified
     * network structure, weight initialization strategy and array of
     * activation functions (one for each computational layer).
     *
     * @param structure      Layer sizes, from the input layer to the output layer.
     * @param initialization Weight initialization strategy.
     * @param functionName   Activation functions array.
     */
    public NeuralNetwork(int[] structure,
                         WeightInitialization initialization,
                         String[] functionName) {

        this();
        this.inputSize = structure[0];
        this.outputSize = structure[structure.length - 1];
        for (int i = 0; i < functionName.length; i++) {
            functionCache.addActivationFunction(functionName[i]);
        }

        this.numberOfInternalLayers = structure.length - 2;
        this.staticResultValues = new double[outputSize];
        int control = 0;

        networkSize = structure.length - 1;
        this.staticLayers = new ILayer[networkSize];

        // One computational layer per consecutive pair of entries in structure.
        int numPesos;
        for (int i = (networkSize - 1); i >= control; i--) {
            numPesos = structure[i];
            staticLayers[i] =
                new Layer(
                    numPesos,
                    structure[i + 1],
                    NOT_DYNAMIC,
                    functionCache.getActivationFunction(functionName[i]));
        }

        initialization.initialize(this);
    } //NeuralNetwork()

    /**
     * Creates a neural network with default attributes for the specified
     * input layer size, output layer size, indication of a normalizer
     * layer and activation function (shared by all computational layers).
     *
     * @param inputSize    Input layer size.
     * @param outputSize   Output layer size.
     * @param normalizer   Indication of a normalizer layer.
     * @param functionName Activation function name.
     */
    public NeuralNetwork(int inputSize,
                         int outputSize,
                         int normalizer,
                         String functionName) {

        this();
        this.inputSize = inputSize;
        this.outputSize = outputSize;
        IActivationFunction activationFunction;
        this.isNeuronNormalizer = normalizer;
        functionCache.addActivationFunction(functionName);
        activationFunction = functionCache.getActivationFunction(functionName);
        networkSize = 0;
        int internalLayerSize = 0;
        this.numberOfInternalLayers = 1;
        this.staticResultValues = new double[outputSize];
        // Hidden layer size: the mean of input and output sizes, but never
        // smaller than MINIMUM_SIZE.
        if ((inputSize + outputSize) / 2 > MINIMUM_SIZE) {
            internalLayerSize = (inputSize + outputSize) / 2;
        } else {
            internalLayerSize = MINIMUM_SIZE;
        }

        networkSize = 2;
        this.staticLayers = new ILayer[2];
        staticLayers[networkSize - 2] =
            new Layer(
                inputSize,
                internalLayerSize,
                NOT_DYNAMIC,
                activationFunction);

        staticLayers[networkSize - 1] =
            new Layer(
                internalLayerSize,
                outputSize,
                NOT_DYNAMIC,
                activationFunction);
        // TODO Is the network fully built at this point?
    } //NeuralNetwork()

    /* (non-Javadoc)
     * @see neuralnetworktoolkit.INeuralNetwork#addLayer(neuralnetworktoolkit.ILayer)
     */
    public void addLayer(ILayer layer) {
        switch (isDynamic) {
            case NeuralNetwork.NOT_DYNAMIC :
                {
                    // TODO Implement this.
                }
                break;
            case NeuralNetwork.DYNAMIC :
                {
                    dynamicLayers.addElement(layer);
                }
                break;
        }
    } //addLayer()

    /* (non-Javadoc)
     * @see neuralnetworktoolkit.INeuralNetwork#removeLayer(int)
     */
    public void removeLayer(int index) {

        switch (isDynamic) {
            case NeuralNetwork.NOT_DYNAMIC :
                {
                    // TODO Implement this.
                }
                break;
            case NeuralNetwork.DYNAMIC :
                {
                    dynamicLayers.removeElementAt(index);
                }
                break;
        }
    } //removeLayer()

    /* (non-Javadoc)
     * @see neuralnetworktoolkit.INeuralNetwork#getLayer(int)
     */
    public ILayer getLayer(int index) {
        ILayer result = null;

        switch (isDynamic) {
            case NeuralNetwork.NOT_DYNAMIC :
                {
                    result = (ILayer) staticLayers[index];
                }
                break;
            case NeuralNetwork.DYNAMIC :
                {
                    result = (ILayer) dynamicLayers.elementAt(index);
                }
                break;
        }
        return result;
    } //getLayer()

    /* (non-Javadoc)
     * @see neuralnetworktoolkit.INeuralNetwork#inputLayerSetup(double[])
     */
    public void inputLayerSetup(double[] inputValues) {
        switch (isDynamic) {
            case NeuralNetwork.NOT_DYNAMIC :
                {
                    staticInputValues = inputValues;
                }
                break;
            case NeuralNetwork.DYNAMIC :
                {
                    // TODO Implement this.
                }
                break;
        }
    } //inputLayerSetup()

    /**
     * Creates a normalizer layer.
     *
     * @param size Layer size.
     *
     * @return Normalizer layer.
     */
    private ILayer createNormalizerLayer(int size) {
        ILayer normalizer = null;
        switch (isDynamic) {
            case NeuralNetwork.NOT_DYNAMIC :
                {
                    // TODO Create normalizer layer.
                }
                break;
            case NeuralNetwork.DYNAMIC :
                {
                    // TODO Implement this.
                }
                break;
        }

        return normalizer;
    } //createNormalizerLayer()

    /* (non-Javadoc)
     * @see neuralnetworktoolkit.INeuralNetwork#propagateInput()
     */
    public void propagateInput() {
        switch (isDynamic) {
            case NeuralNetwork.NOT_DYNAMIC :
                {
                    switch (isMultiConexed) {
                        case NeuralNetwork.MULTICONEXED :
                            {
                                // For the first layer: feed each neuron the
                                // weighted sum of the input values plus its bias,
                                // then compute the neuron's output.
                                for (int i = 0;
                                    i < staticLayers[0].getLayerSize();
                                    i++) {
                                    double input = 0;
                                    for (int j = 0;
                                        j < staticInputValues.length;
                                        j++) {
                                        input =
                                            input
                                                + staticInputValues[j]
                                                    * staticLayers[0].getWeight(j, i);
                                    }
                                    input = input + staticLayers[0].getBias(i);
                                    staticLayers[0].getNeuron(i).setInput(input);
                                    staticLayers[0]
                                        .getNeuron(i)
                                        .calculateOutputValue();
                                }
                                // For each remaining layer:
                                for (int i = 1; i < staticLayers.length; i++) {
                                    // For each neuron:
                                    for (int j = 0;
                                        j < staticLayers[i].getLayerSize();
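The listing on this page covers construction, layer access and the start of forward propagation (it continues on page 2). For orientation, here is a minimal usage sketch built only from the constructors and methods visible above. The activation-function name "Sigmoid" and the class RandomWeightInitialization are assumptions made for illustration: valid function names depend on the toolkit's FunctionCache, and the concrete WeightInitialization implementations are not shown on this page. Reading the outputs back is omitted because no result getter appears in this part of the file.

import neuralnetworktoolkit.neuralnetwork.NeuralNetwork;
import neuralnetworktoolkit.neuralnetwork.weightinitialization.WeightInitialization;

public class NeuralNetworkDemo {

    public static void main(String[] args) {
        // Layer sizes from input to output: 4 inputs, 15 hidden neurons, 2 outputs.
        int[] structure = {4, 15, 2};

        // One activation-function name per computational layer (hidden and output).
        // "Sigmoid" is an assumed name; valid names depend on FunctionCache.
        String[] functions = {"Sigmoid", "Sigmoid"};

        // RandomWeightInitialization is hypothetical; any concrete
        // WeightInitialization implementation shipped with the toolkit would do.
        WeightInitialization init = new RandomWeightInitialization();

        NeuralNetwork network = new NeuralNetwork(structure, init, functions);

        // Forward pass: bind an input vector, then propagate it layer by layer.
        network.inputLayerSetup(new double[] {0.1, 0.5, 0.3, 0.9});
        network.propagateInput();
    }
}

As the int[] constructor above shows, the network registers each activation function in the FunctionCache, builds one Layer per consecutive pair of sizes in structure, and then delegates weight setup to the supplied WeightInitialization.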
