// backpropagation.java
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/**
* Title: XELOPES Data Mining Library
* Description: The XELOPES library is an open platform-independent and data-source-independent library for Embedded Data Mining.
* Copyright: Copyright (c) 2002 Prudential Systems Software GmbH
* Company: ZSoft (www.zsoft.ru), Prudsys (www.prudsys.com)
* @author Michael Thess
* @version 1.1
*/
package com.prudsys.pdm.Models.Regression.NeuralNetwork.Algorithms;
import com.prudsys.pdm.Core.CategoricalAttribute;
import com.prudsys.pdm.Core.MiningAttribute;
import com.prudsys.pdm.Core.MiningException;
import com.prudsys.pdm.Core.NumericAttribute;
import com.prudsys.pdm.Input.MiningVector;
import com.prudsys.pdm.Models.Regression.NeuralNetwork.ActivationFunction;
import com.prudsys.pdm.Models.Regression.NeuralNetwork.NeuralInput;
import com.prudsys.pdm.Models.Regression.NeuralNetwork.NeuralLayer;
import com.prudsys.pdm.Models.Regression.NeuralNetwork.NeuralNetwork;
import com.prudsys.pdm.Models.Regression.NeuralNetwork.NeuralNetworkAlgorithm;
import com.prudsys.pdm.Models.Regression.NeuralNetwork.NeuralNetworkSettings;
import com.prudsys.pdm.Models.Regression.NeuralNetwork.NeuralOutput;
import com.prudsys.pdm.Transform.OneToOne.Copy;
/**
* Class for backpropagation. Still not finished!!!!!
*/
public class Backpropagation extends NeuralNetworkAlgorithm
{
    /**
     * Multiplicative decrease of the learning rate per iteration.
     * 1.0 (the default) keeps the rate constant; values below 1.0 anneal it.
     */
    protected double decreasingRate = 1.0;

    /**
     * Runs the backpropagation algorithm, also backpropagation with momentum.
     * <p>
     * If {@code autoBuildNetwork} is set, a four-layer network
     * (nInp, nInp, nOut, nOut) is created automatically, the third layer gets a
     * linear activation and the output layer softmax; input neurons are mapped
     * one-to-one to the non-target attributes, output neurons to the target
     * (one neuron per category for a categorical target). Training then iterates
     * over the input stream up to {@code maxNumberOfIterations} times, updating
     * weights per vector, until the normalized error drops to {@code maxError}.
     *
     * @exception MiningException could not run algorithm, or training diverged
     *            (error became infinite or NaN)
     */
    protected void runAlgorithm() throws MiningException {
        /************************************************************************/
        /* Build the network...                                                 */
        /************************************************************************/
        // Automatically build neural network:
        int nInp = 0;
        int nOut = 0;
        if (autoBuildNetwork) {
            // Get number of inputs and outputs (one input per non-target attribute):
            nInp = metaData.getAttributesNumber() - 1;
            nOut = 1;
            if (target instanceof CategoricalAttribute)
                nOut = ((CategoricalAttribute) target).getCategoriesNumber();

            // Build network (simple approach): two hidden-ish layers sized like
            // the input/output layers.
            neuralNetwork = new NeuralNetwork();
            int[] layers = {nInp, nInp, nOut, nOut};
            neuralNetwork.createNetworkAutomatically(layers);

            // Change activation function of layer 2 to linear, output layer to softmax:
            NeuralLayer linNeuron = neuralNetwork.getNeuralLayer()[2];
            linNeuron.setActivationFunction( ActivationFunction.getInstance(
                                             ActivationFunction.LINEAR) );
            NeuralLayer outLayer = neuralNetwork.getOutputLayer();
            for (int i = 0; i < outLayer.getNumberOfNodes(); i++)
                ((NeuralOutput) outLayer.getNeuralNodes()[i]).setSoftmax(true);

            // Assign attributes to input neurons.
            // FIX: use a separate neuron index (inpIdx) instead of the attribute
            // index i. The old code indexed neurons by i while skipping the
            // target attribute, which left a neuron unmapped and read one past
            // the end of the input layer whenever the target was not the last
            // attribute.
            NeuralLayer inputLayer = neuralNetwork.getInputLayer();
            int inpIdx = 0;
            for (int i = 0; i < metaData.getAttributesNumber(); i++) {
                MiningAttribute ma = metaData.getMiningAttribute(i);
                if ( ma.getName().equals( target.getName() ) )
                    continue;
                // Use identity transformation:
                Copy cp = new Copy();
                cp.setSourceName( ma.getName() );
                cp.setSourceAttribute(ma);
                cp.setRemoveSourceAttribute(false);
                NeuralInput ninp = (NeuralInput) inputLayer.getNeuralNodes()[inpIdx++];
                ninp.setOneToOneMapping(cp);
            }

            // Output neurons (one per category for categorical targets):
            NeuralLayer outputLayer = neuralNetwork.getOutputLayer();
            for (int i = 0; i < nOut; i++) {
                String attName = "outAttribute";
                if (target instanceof CategoricalAttribute)
                    attName = "outAtt_" + ((CategoricalAttribute) target).getCategory(i);
                // Use identity transformation:
                Copy cp = new Copy();
                cp.setSourceName( attName );
                cp.setRemoveSourceAttribute(false);
                NeuralOutput nout = (NeuralOutput) outputLayer.getNeuralNodes()[i];
                nout.setOneToOneMapping(cp);
            }
        }
        System.out.println(neuralNetwork); // debug trace of the built network
        neuralNetwork.setMetaData(metaData);
        neuralNetwork.setClassName( target.getName() );

        /************************************************************************/
        /* Run backpropagation algorithm...                                     */
        /************************************************************************/
        // Plain backpropagation is momentum backpropagation with momentum = 0:
        if (learningType == NeuralNetworkSettings.BACK_PROPAGATION)
            momentum = 0;
        double rate = learningRate;
        int nVec = miningInputStream.getVectorsNumber();
        // FIX: take the output count from the actual network. The old code
        // normalized by the local nOut, which stays 0 when autoBuildNetwork is
        // false (externally supplied network) and made the error divide by zero.
        int numOut = neuralNetwork.getOutputLayer().getNumberOfNodes();
        for (int i = 0; i < maxNumberOfIterations; i++) {
            double error = 0.0;
            // One training iteration through all vectors:
            miningInputStream.reset();
            while ( miningInputStream.next() ) {
                MiningVector miningVector = miningInputStream.read();
                assignVectorToNetwork(neuralNetwork, miningVector);
                neuralNetwork.resetValues();
                neuralNetwork.outputValues(true);
                neuralNetwork.errorValues(true);
                error = error + neuralNetwork.trainingError();
                neuralNetwork.updateWeights( rate, momentum );
            }
            // Normalize error per output neuron and vector, then assess it:
            error = error / ((double) numOut * nVec);
            System.out.println(i + ": error = " + error); // progress trace
            if ( Double.isInfinite(error) || Double.isNaN(error) )
                throw new MiningException("network training error; change parameters");
            if (error <= maxError)
                break;
            // Anneal the learning rate:
            rate = rate * decreasingRate;
        }
    }

    /**
     * Assigns a mining vector to the neural network. All values except
     * the target value are assigned to the input neurons, the target value
     * to the output neurons (1-of-n encoding for categorical targets).
     *
     * @param network neural network
     * @param vector  mining vector to assign
     * @exception MiningException cannot assign vector
     */
    private void assignVectorToNetwork(NeuralNetwork network, MiningVector vector)
        throws MiningException {
        NeuralLayer inputLayer  = network.getInputLayer();
        NeuralLayer outputLayer = network.getOutputLayer();
        // FIX: track the next free input neuron separately (inpIdx) instead of
        // reusing the attribute index i; see the matching fix in runAlgorithm.
        int inpIdx = 0;
        for (int i = 0; i < metaData.getAttributesNumber(); i++) {
            MiningAttribute ma = metaData.getMiningAttribute(i);
            // Target attribute => set output:
            if ( ma.getName().equals( target.getName() ) ) {
                if (target instanceof NumericAttribute) {
                    // Single output neuron carries the numeric target directly:
                    NeuralOutput no = (NeuralOutput) outputLayer.getNeuralNodes()[0];
                    no.setPredictedValue( vector.getValue(i) );
                }
                else {
                    // 1-of-n encoding: neuron of the vector's category gets 1,
                    // all others 0.
                    int ind  = (int) vector.getValue(target);
                    int nOut = ((CategoricalAttribute) target).getCategoriesNumber();
                    for (int j = 0; j < nOut; j++) {
                        NeuralOutput no = (NeuralOutput) outputLayer.getNeuralNodes()[j];
                        if (j == ind)
                            no.setPredictedValue(1);
                        else
                            no.setPredictedValue(0);
                    }
                }
            }
            // Non-target attribute => set next input neuron:
            else {
                NeuralInput ni = (NeuralInput) inputLayer.getNeuralNodes()[inpIdx++];
                ni.setInput( vector.getValue(i) );
            }
        }
    }

    /**
     * Returns the decreasing rate.
     *
     * @return decreasing rate (learning-rate multiplier per iteration)
     */
    public double getDecreasingRate() {
        return decreasingRate;
    }

    /**
     * Sets a new decreasing rate.
     *
     * @param decreasingRate new decreasing rate (learning-rate multiplier per iteration)
     */
    public void setDecreasingRate(double decreasingRate) {
        this.decreasingRate = decreasingRate;
    }
}