
📄 .#networkcontroller.java.1.30

📁 A neural network toolkit implemented in Java

📖 Page 1 of 2
/*
 * $RCSfile: NetworkController.java,v $
 * $Revision: 1.30 $
 * $Date: 2005/03/11 03:17:21 $
 *
 * NeuralNetworkToolkit
 * Copyright (C) 2004 Universidade de Brasília
 *
 * This file is part of NeuralNetworkToolkit.
 *
 * NeuralNetworkToolkit is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * NeuralNetworkToolkit is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with NeuralNetworkToolkit; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA - 02111-1307 - USA.
 */
package neuralnetworktoolkit;

import java.util.*;

import neuralnetworktoolkit.normalization.*;
import neuralnetworktoolkit.architectures.*;
import neuralnetworktoolkit.architectures.mlp.*;
import neuralnetworktoolkit.math.*;
import neuralnetworktoolkit.methods.*;
import neuralnetworktoolkit.neuralnetwork.*;
import neuralnetworktoolkit.neuralnetwork.weightinitialization.*;
import neuralnetworktoolkit.validation.*;

/**
 * This class provides facilities for the use of the Neural Network Toolkit API.
 * It puts together many of the API resources, making it easy to integrate NNTK
 * with other stand-alone software. We strongly recommend its use because it
 * imposes some rules on the correct use of the API.
 *
 * @version $Revision: 1.30 $ - $Date: 2005/03/11 03:17:21 $
 *
 * @author <a href="mailto:hugoiver@yahoo.com.br">Hugo Iver V. Gonçalves </a>
 * @author <a href="mailto:rodbra@pop.com.br">Rodrigo C. M. Coimbra </a>
 */
public class NetworkController {

    private ResourceBundle resources;

    /**
     * Defines that input and output data will be normalized.
     */
    public static final int FULLNORMALIZER = NormalizationParameters.NORMALIZE_ALL;

    /**
     * Defines that only input data will be normalized.
     */
    public static final int INPUTNORMALIZER = NormalizationParameters.NORMALIZE_INPUT;

    /**
     * Standard methods package.
     */
    private static final String METHODS_PACKAGE = "neuralnetworktoolkit.methods.";

    /**
     * Standard normalization package.
     */
    private static final String NORMALIZATION_PACKAGE = "neuralnetworktoolkit.normalization.";

    /**
     * Standard initialization package.
     */
    private static final String INITIALIZATION_PACKAGE = "neuralnetworktoolkit.neuralnetwork.weightinitialization.";

    /**
     * Standard validation package.
     */
    private static final String VALIDATION_PACKAGE = "neuralnetworktoolkit.validation.";

    /**
     * A String that represents the network architecture.
     */
    private String networkArchitecture;

    /**
     * Normalization classes to be used in input and output data respectively.
     */
    private INormalization inputNormalization, outputNormalization;

    private StatisticalResults statisticalResults;

    /**
     * The neural network.
     */
    private INeuralNetwork neuralNetwork;

    /**
     * Training method class.
     */
    private ITrainingMethod trainingMethod;

    private AboutTrainingMethod methodInfo;

    private WeightInitialization initialization;

    private double[][] completeData;
    private double[][] inputs;
    private double[][] outputs;

    private String[] completeDataHeader;
    private String[] inputsDataHeader;
    private String[] outputsDataHeader;

    private double[][] inferenceData;
    private double[][] inferedData;

    private String[] inferenceDataHeader;
    private String[] inferedDataHeader;

    private int[] outputIndex;

    private boolean isNormalizer = false;
    private boolean isFullNormalizer = false;
    private boolean isNNTrained = false;
    private boolean isOutputIndexSet = false;
    private boolean isValidationDataLoaded = false;
    private boolean isDataSplit = false;

    private Date date;

    /**
     * Default constructor.
     */
    public NetworkController() {
        this.resources = ResourceBundle.getBundle("neuralnetworktoolkit.resources.Resources");
        this.networkArchitecture = "";
    } //NetworkController()

    /**
     * Creates a neural network from neural network architecture parameters.
     *
     * @param parameters
     *
     * @throws NetworkControllerException
     */
    public void createNeuralNetwork(ArchitectureParameters parameters)
            throws NetworkControllerException {
        MLPParameters param = (MLPParameters) parameters;
        createNeuralNetwork(param.getNeurons(), param.getInitializationName(),
                param.getActivationFunctions());
    } //createNeuralNetwork()

    /**
     * Creates a neural network from a neural model. Actually it just gets the already
     * existing objects in the neural model and sets them in this NetworkController.
     *
     * @param nm The neural model.
     *
     * @throws NetworkControllerException
     */
    public void createNeuralNetwork(NeuralModel nm) throws NetworkControllerException {
        this.neuralNetwork = nm.getNeuralNetwork();
        this.inputNormalization = nm.getInputNormalization();
        this.outputNormalization = nm.getOutputNormalization();
        this.date = nm.getDate();

        if (inputNormalization != null) {
            isNormalizer = true;
            if (outputNormalization != null) {
                isFullNormalizer = true;
            }
        }
    } //createNeuralNetwork()

    /**
     * Creates a neural network according to the specifications provided by the
     * user.
     *
     * @param parameters Parameters used to build the neural network. The user must
     *        provide an int array containing the number of neurons in each layer, including
     *        the input layer. For instance, {2, 3, 4, 2} defines a neural network with an input
     *        layer of size 2, and 3 computational layers with sizes 3, 4 and 2 (this last being
     *        the output layer) respectively.
     *
     * @param initializationName The initialization technique that will be used to initialize
     *        the neural network weights.
     *
     * @param functions A String array containing the activation functions that will be used
     *        respectively in each of the computational layers.
     *
     * @throws NetworkControllerException If the number of computational layers does not
     *         match the number of activation functions specified, an exception is thrown.
     */
    public void createNeuralNetwork(int[] parameters, String initializationName, String[] functions)
            throws NetworkControllerException {

        if ((parameters.length - 1) == functions.length) {
            try {
                initialization = (WeightInitialization) Class
                    .forName(INITIALIZATION_PACKAGE + initializationName)
                    .newInstance();
                if (neuralNetwork == null) {
                    neuralNetwork = new NeuralNetwork(parameters, initialization, functions);
                } else {
                    throw new NetworkControllerException(resources.getString("networkAlreadyCreated"));
                }
            } catch (InstantiationException e1) {
                throw new NetworkControllerException(e1);
            } catch (IllegalAccessException e1) {
                throw new NetworkControllerException(e1);
            } catch (ClassNotFoundException e1) {
                throw new NetworkControllerException(e1);
            }
        } else {
            throw new NetworkControllerException(resources.getString("incompatibleNumberOfLayersAndFunctions"));
        }
    } // createNeuralNetwork()

    /**
     * Sets the training method that will be used in the network.
     *
     * @param trainingMethodName The training method that will be used to train
     *        the neural network.
     *
     * @throws NetworkControllerException If no training method name is given, an exception
     *         is thrown.
     */
    public void setTrainingMethod(String trainingMethodName) throws NetworkControllerException {

        if (trainingMethodName != null) {
            try {
                trainingMethod = (ITrainingMethod) Class
                    .forName(METHODS_PACKAGE + trainingMethodName)
                    .newInstance();
            } catch (InstantiationException e1) {
                throw new NetworkControllerException(e1);
            } catch (IllegalAccessException e1) {
                throw new NetworkControllerException(e1);
            } catch (ClassNotFoundException e1) {
                throw new NetworkControllerException(e1);
            }
        } else {
            throw new NetworkControllerException(resources.getString("methodAlreadySetted"));
        }
    } // setTrainingMethod()

    /**
     * Specifies the normalization that will be used in the neural network.
     *
     * @param parameters
     * @throws NetworkControllerException
     */
    public void setNormalization(NormalizationParameters parameters) throws NetworkControllerException {
        setNormalization(parameters.getNormalizer(), parameters.getType());
    } //setNormalization()

    /**
     * Specifies the normalization that will be used in the neural network.
     *
     * @param normalizer The normalization technique to be used.
     *
     * @param type The kind of normalization, defined by constants in this class:
     *        NetworkController.INPUTNORMALIZER stands for normalization only of the input data and
     *        NetworkController.FULLNORMALIZER stands for normalization of input and output data.
     *
     * @throws NetworkControllerException If a wrong constant is passed in <b>type</b> an exception is thrown.
     */
    public void setNormalization(String normalizer, int type) throws NetworkControllerException {

        switch (type) {
        case (NormalizationParameters.NO_NORMALIZE): {
            isNormalizer = false;
        } break;
        case (NormalizationParameters.NORMALIZE_INPUT): {
            // Only the input data is normalized.
            try {
                Class norm = Class.forName(NORMALIZATION_PACKAGE + normalizer);
                inputNormalization = (INormalization) norm.newInstance();
                isNormalizer = true;
            } catch (InstantiationException e1) {
                throw new NetworkControllerException(e1);
            } catch (IllegalAccessException e1) {
                throw new NetworkControllerException(e1);
            } catch (ClassNotFoundException e1) {
                throw new NetworkControllerException(e1);
            }
        } break;
        case (NormalizationParameters.NORMALIZE_ALL): {
            // Input and output data are both normalized, each with its own normalizer instance.
            try {
                Class norm = Class.forName(NORMALIZATION_PACKAGE + normalizer);
                inputNormalization = (INormalization) norm.newInstance();
                isNormalizer = true;
                outputNormalization = (INormalization) norm.newInstance();
                isFullNormalizer = true;
            } catch (InstantiationException e1) {
                throw new NetworkControllerException(e1);
            } catch (IllegalAccessException e1) {
                throw new NetworkControllerException(e1);
            } catch (ClassNotFoundException e1) {
                throw new NetworkControllerException(e1);
            }
        } break;
        default:
            throw new NetworkControllerException(resources.getString("incompatibleNormalization"));
        }
    } //setNormalization()

    /**
     * This is a very important method. To correctly train a neural network one must
     * choose what the expected output will be. This method assumes that the training
     * or validation data are already loaded and, by using it, inputs and outputs are
     * automatically separated.
     *
     * @param outputIndex The outputIndex to set.
     */
    public void setOutputIndex(int[] outputIndex) {
        this.outputIndex = outputIndex;
        isOutputIndexSet = true;
        if (completeData != null) {
            try {
                splitDataValues();
            } catch (NetworkControllerException e) {
                e.printStackTrace();
            }
        }
    } // setOutputIndex()

    /**
     * Trains the neural network with the options previously set.
     *
     * @param param Parameters chosen according to the training method used.
     *
     * @return Statistical information about the training.
     *
     * @throws NetworkControllerException Throws an exception in case:
     *         <br>a) No training data was loaded.
     *         <br>b) A neural network is not built.
     *         <br>c) A training method is not set.
     */
    public StatisticalResults trainNeuralNetwork(TrainingParameters param) throws NetworkControllerException {

        boolean incorrect = false;
        StatisticalResults results;

        if (completeData == null) {
            throw new NetworkControllerException(resources.getString("trainingDataNotLoaded"));
        } else {
            setTrainingMethod(param.getMethod());
            if (neuralNetwork == null) {
                throw new NetworkControllerException(resources.getString("nnNotCreated"));
            } else if (trainingMethod == null) {
                throw new NetworkControllerException(resources.getString("methodNotSet"));
            }

            if (isDataSplit) {

                if (inputs[0].length != neuralNetwork.getInputSize()) {
                    throw new NetworkControllerException(resources.getString("dataIncompatibleWithNetwork"));
                } else {
                    //NeuralMath.printMatrix(completeData);
                    // Debug output ("Numero de sinapses" = number of synapses).
                    System.out.println("Numero de sinapses: " + neuralNetwork.numberOfSynapses());
                    if (isNormalizer) {

                        inputNormalization.setupParameters(inputs);
                        inputs = inputNormalization.normalize(inputs);
                        if (isFullNormalizer) {
                            outputNormalization.setupParameters(outputs);
                            outputs = outputNormalization.normalize(outputs);
                        }
                        // Debug output ("normalizei" = normalized).
                        System.out.println("normalizei");
                    }
                    //NeuralMath.printMatrix(inputs);
                    //NeuralMath.printMatrix(outputs);
                    param.setInputs(inputs);
                    param.setOutputs(outputs);

                    results = trainingMethod.train(neuralNetwork, param);
                    date = new Date();
                    isNNTrained = true;
                    statisticalResults = results;
                    return results;
                }
            } else {
                throw new NetworkControllerException(resources.getString("dataNotSplit"));
            }
        }
    } // trainNeuralNetwork()

    /**
     * Uses the existing neural network to infer the results using the available inputs.
     * One must call setInferenceData() first.
     *
     * @throws NetworkControllerException
     */
    public void infereWithNetwork() throws NetworkControllerException {

        if (inferenceData == null) {
            throw new NetworkControllerException(resources.getString("inferenceDataNotLoaded"));
        } else {

            if (inferenceData[0].length != neuralNetwork.getInputSize()) {
                throw new NetworkControllerException(resources.getString("dataIncompatibleWithNetwork"));
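
The listing above ends midway through infereWithNetwork(); the remainder is on page 2. The methods shown here already describe the intended workflow: build a network, choose a normalization, mark which columns are the expected outputs, then train. The sketch below is a hypothetical usage example assembled only from the signatures visible on this page. The string class names ("RandomInitialization", "MinMaxNormalization") and the "BackpropagationParameters" type are placeholders, not names confirmed by this file, and the call that loads completeData is omitted because it is defined in the part of the class not shown here.

// Hypothetical usage sketch for NetworkController. All quoted class names below are
// placeholders resolved by reflection against the toolkit packages; they are assumptions,
// not names confirmed by this listing.
import neuralnetworktoolkit.*;
import neuralnetworktoolkit.methods.*;
import neuralnetworktoolkit.validation.*;

public class NetworkControllerExample {

    public static void main(String[] args) throws NetworkControllerException {
        NetworkController controller = new NetworkController();

        // 2 input neurons, one hidden layer of 3 neurons, 1 output neuron.
        // "RandomInitialization" and the activation function names are placeholder class names.
        controller.createNeuralNetwork(new int[] {2, 3, 1},
                "RandomInitialization",
                new String[] {"Sigmoid", "Linear"});

        // Normalize both input and output data ("MinMaxNormalization" is a placeholder
        // resolved against neuralnetworktoolkit.normalization).
        controller.setNormalization("MinMaxNormalization", NetworkController.FULLNORMALIZER);

        // Mark column 2 of the (already loaded) data set as the expected output;
        // the data-loading call itself appears on page 2 of the listing.
        controller.setOutputIndex(new int[] {2});

        // Each training method ships its own TrainingParameters subclass;
        // "BackpropagationParameters" is a placeholder name. trainNeuralNetwork() reads the
        // method name from the parameters, normalizes the data, trains and returns statistics.
        TrainingParameters params = new BackpropagationParameters();
        StatisticalResults results = controller.trainNeuralNetwork(params);
        System.out.println(results);
    }
}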
