📄 BackPropagationModel.java
/*
 * $RCSfile: BackPropagationModel.java,v $
 * $Revision: 1.5 $
 * $Date: 2005/05/08 02:16:28 $
 *
 * NeuralNetworkToolkit
 * Copyright (C) 2004 Universidade de Brasília
 *
 * This file is part of NeuralNetworkToolkit.
 *
 * NeuralNetworkToolkit is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * NeuralNetworkToolkit is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with NeuralNetworkToolkit; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA - 02111-1307 - USA.
 */
package neuralnetworktoolkit.methods.gradientbased.backpropagation;

import neuralnetworktoolkit.StatisticalResults;
import neuralnetworktoolkit.methods.gradientbased.GradientBasedMethod;
import neuralnetworktoolkit.neuralnetwork.*;

/**
 * Common base class for back-propagation training methods. It holds the
 * training state shared by the concrete variants (learning rate, error goal,
 * epoch counters) and implements the total-error and neuron-delta
 * computations of the back-propagation algorithm.
 *
 * @version $Revision: 1.5 $ - $Date: 2005/05/08 02:16:28 $
 *
 * @author <a href="mailto:hugoiver@yahoo.com.br">Hugo Iver V. Gonçalves</a>
 * @author <a href="mailto:rodbra@pop.com.br">Rodrigo C. M. Coimbra</a>
 */
public abstract class BackPropagationModel extends GradientBasedMethod {

    /** Maximum number of epochs for training. */
    public static final int MAX_EPOCHS = 10000000;

    /** Iteration condition. */
    protected boolean goAhead = true;

    /** Statistics collected during training. */
    protected StatisticalResults results;

    /** Training start and end times ("inicio"/"fim" are Portuguese for start/end). */
    protected double inicio, fim;

    /** Number of network inputs. */
    protected int inputSize;

    /** Number of network outputs. */
    protected int outputSize;

    /** Number of epochs executed so far. */
    protected int numberOfEpochs;

    /** Total error energy accumulated over an epoch. */
    protected double totalErrorEnergy;

    /** Error of the current training instance. */
    protected double instantaneousError;

    /** Learning rate applied to the weight corrections. */
    protected double learningRate;

    /** Accumulated weight corrections. */
    protected double[][] deltaW;

    /** Weight corrections for the current instance. */
    protected double[][] instantDeltaW;

    /** Derivatives of the neuron activation functions. */
    protected double[] derivativesVector;

    /** Error value at which training stops. */
    protected double errorGoal;

    /** Upper bound on the number of training epochs. */
    protected int maximumNumberOfEpochs;

    /** Total number of synapses in the network. */
    protected int numberOfSynapses;

    public BackPropagationModel() {
        this.inputSize = 0;
        this.outputSize = 0;
        this.numberOfEpochs = 0;
        // Seeded above any reasonable error goal so training does not stop
        // before the first epoch.
        this.totalErrorEnergy = 500;
        this.instantaneousError = 0;
        this.errorGoal = 0;
        this.maximumNumberOfEpochs = MAX_EPOCHS;
        this.results = new StatisticalResults();
    }

    /**
     * Calculates the total (sum-of-squares) error over the instance set.
     * Assumes a single-output network: only the first network output is
     * compared against the expected value.
     *
     * @param neuralNetwork
     *            Neural network to calculate error.
     * @param instanceSet
     *            Training instances set.
     * @param outputs
     *            Network expected outputs.
     *
     * @return Total error calculated.
     */
    public double calculateTotalError(INeuralNetwork neuralNetwork,
            double[][] instanceSet, double[][] outputs) {
        double error = 0;
        double actualOutput;
        for (int i = 0; i < instanceSet.length; i++) {
            neuralNetwork.inputLayerSetup(instanceSet[i]);
            neuralNetwork.propagateInput();
            actualOutput = neuralNetwork.retrieveFinalResults()[0];
            error += (outputs[i][0] - actualOutput)
                    * (outputs[i][0] - actualOutput);
        }
        return error;
    } // calculateTotalError()

    /**
     * Calculates the delta of every neuron, propagating the output error
     * backwards through the network. Only the static, multi-connected case
     * is implemented; the remaining combinations are left as TODOs.
     *
     * @param neuralNetwork
     *            Neural network to calculate deltas.
     * @param outputs
     *            Network expected outputs.
     */
    public void calculateNeuronDeltas(INeuralNetwork neuralNetwork,
            double[] outputs) {
        double inputValue;
        double wXdelta;
        if (!neuralNetwork.isDynamic()) {
            if (neuralNetwork.isMultiConexed()) {
                ILayer l1 = neuralNetwork
                        .getLayer(neuralNetwork.getNetworkSize() - 1);
                ILayer l2 = neuralNetwork
                        .getLayer(neuralNetwork.getNetworkSize() - 2);
                // Output layer: delta = -phi'(v) * (desired - actual).
                for (int k = 0; k < l1.getLayerSize(); k++) {
                    // Calculates the induced local field (inputValue) of
                    // output neuron k.
                    inputValue = 0;
                    for (int j = 0; j < l2.getLayerSize(); j++) {
                        inputValue += l2.getNeuron(j).getOutputValue()
                                * l1.getWeight(j, k);
                    }
                    inputValue += l1.getBias(k);
                    l1.getNeuron(k).setDelta(
                            -l1.getNeuron(k).getActivationFunction()
                                    .functionDerivative(inputValue)
                                    * (outputs[k]
                                            - l1.getNeuron(k).getOutputValue()));
                }
                // Hidden layers, back to front:
                // delta = phi'(v) * sum(w * delta of the forward layer).
                for (int i = neuralNetwork.getNetworkSize() - 2; i >= 0; i--) {
                    for (int j = 0; j < neuralNetwork.getLayer(i)
                            .getLayerSize(); j++) {
                        inputValue = 0;
                        wXdelta = 0;
                        // Calculates the respective weight times delta of the
                        // forward layer.
                        for (int k = 0; k < neuralNetwork.getLayer(i + 1)
                                .getLayerSize(); k++) {
                            wXdelta += neuralNetwork.getLayer(i + 1)
                                    .getWeight(j, k)
                                    * neuralNetwork.getLayer(i + 1)
                                            .getNeuron(k).getDelta();
                        }
                        // The induced local field of neuron j comes from the
                        // previous layer, or from the network inputs for the
                        // first layer.
                        if (i > 0) {
                            for (int a = 0; a < neuralNetwork.getLayer(i - 1)
                                    .getLayerSize(); a++) {
                                inputValue += neuralNetwork.getLayer(i - 1)
                                        .getNeuron(a).getOutputValue()
                                        * neuralNetwork.getLayer(i)
                                                .getWeight(a, j);
                            }
                        } else {
                            for (int a = 0; a < neuralNetwork
                                    .getStaticInputValues().length; a++) {
                                inputValue += neuralNetwork
                                        .getStaticInputValues()[a]
                                        * neuralNetwork.getLayer(i)
                                                .getWeight(a, j);
                            }
                        }
                        inputValue += neuralNetwork.getLayer(i).getBias(j);
                        l1 = neuralNetwork.getLayer(i);
                        l1.getNeuron(j).setDelta(
                                l1.getNeuron(j).getActivationFunction()
                                        .functionDerivative(inputValue)
                                        * wXdelta);
                    }
                }
            } else {
                // TODO implement this case.
            }
        } else {
            // TODO Implement this case.
            if (neuralNetwork.isMultiConexed()) {
                // TODO Implement this.
            } else {
                // TODO Implement this.
            }
        }
    } // calculateNeuronDeltas()
} // BackPropagationModel
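
For orientation, below is a minimal sketch of how a concrete subclass might drive an on-line training loop with the two helpers above. Everything beyond the members of BackPropagationModel is an assumption: the class name SimpleOnLineBackPropagation is hypothetical, and the ILayer.setWeight(int, int, double) and setBias(int, double) mutators are guessed, since this file only shows the corresponding getters. The toolkit's real OnLineBackPropagation class presumably plays this role, but its source is not shown here.

package neuralnetworktoolkit.methods.gradientbased.backpropagation;

import neuralnetworktoolkit.neuralnetwork.ILayer;
import neuralnetworktoolkit.neuralnetwork.INeuralNetwork;

/** Hypothetical on-line back-propagation trainer (illustration only). */
public class SimpleOnLineBackPropagation extends BackPropagationModel {

    /** Trains until the error goal or the epoch limit is reached. */
    public void train(INeuralNetwork net, double[][] instances,
            double[][] desired) {
        while (totalErrorEnergy > errorGoal
                && numberOfEpochs < maximumNumberOfEpochs) {
            for (int i = 0; i < instances.length; i++) {
                net.inputLayerSetup(instances[i]);
                net.propagateInput();
                calculateNeuronDeltas(net, desired[i]); // deltas for all neurons
                updateWeights(net);                     // hypothetical update step
            }
            totalErrorEnergy = calculateTotalError(net, instances, desired);
            numberOfEpochs++;
        }
    }

    /**
     * Gradient-descent step: w -= eta * delta * input. The subtraction
     * matches the negative output-delta convention of calculateNeuronDeltas.
     * The first layer (fed by the network inputs) is omitted for brevity.
     */
    private void updateWeights(INeuralNetwork net) {
        for (int i = net.getNetworkSize() - 1; i > 0; i--) {
            ILayer layer = net.getLayer(i);
            ILayer previous = net.getLayer(i - 1);
            for (int k = 0; k < layer.getLayerSize(); k++) {
                double delta = layer.getNeuron(k).getDelta();
                for (int j = 0; j < previous.getLayerSize(); j++) {
                    // setWeight is an assumed mutator; the real API may differ.
                    layer.setWeight(j, k, layer.getWeight(j, k)
                            - learningRate * delta
                                    * previous.getNeuron(j).getOutputValue());
                }
                // setBias is likewise assumed.
                layer.setBias(k, layer.getBias(k) - learningRate * delta);
            }
        }
    }
}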