// File: bpn.java
// (web-viewer page-header residue removed from the scraped source)
////////////////////////////////////////////////////////////////////////////////////
//
// Copyright (C) 1996 L. Patocchi & W.Gander
//
// This program is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License as published by the Free
// Software Foundation; either version 2 of the License, or (at your option)
// any later version.
//
// This program is distributed in the hope that it will be useful, but WITHOUT
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
// more details.
//
// You should have received a copy of the GNU General Public License along with
// this program; if not, write to the Free Software Foundation, Inc., 675 Mass
// Ave, Cambridge, MA 02139, USA.
//
// Contacts:
//
// Project Supervisor
// W.Hett        hew@info.isbiel.ch
//
// Authors
// W.Gander      gandw@info.isbiel.ch
// L.Patocchi    patol@info.isbiel.ch
//
// Documentation can be found at:
//
// http://www.isbiel.ch/Projects/janet/index.html
//
////////////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////
// File : BPN.java
//////////////////////////////////////////////////////////////////////
// BPN base class
//////////////////////////////////////////////////////////////////////
// Author:  Patocchi L.
// Date:    03.09.1996
// Project: jaNet
//
// BPN class is the class that the end-user will utilise to
// integrate a backpropagation jaNet network in his application.
//
// - a BPN network is composed of:
//   * 1 input layer
//   * 0 or more hidden layers
//   * 1 output layer
//
//////////////////////////////////////////////////////////////////////
// date        who           what
// 03.09.1996  Patocchi L.
// creation

package jaNet.backprop;

import java.util.*;
import java.io.*;

public class BPN {

    // Version tag written into / checked against saved-network files
    // (see buildStructure(RandomAccessFile) below).
    private static final String version = "1.0beta1";

    ////////////////////////////////////////////////////////////////////
    // Protected variables
    ////////////////////////////////////////////////////////////////////

    // All layers in order: index 0 is the input layer, the last index is
    // the output layer.
    protected BPNLayer allLayers[] = null;

    // Weight packs between consecutive layers; allWeights[i] connects
    // allLayers[i] to allLayers[i+1], hence one fewer entry than layers.
    protected BPNLayer allWeights_doc_placeholder_removed; // (see note)
    protected BPNWeightPack allWeights[] = null;
    protected inputLayer inputL;
    protected outputLayer outputL;

    // neural nets functional variables;
    protected double learningRate;
    protected double momentum;
    protected double initMin = -0.1;  // lower bound for weight randomization
    protected double initMax = 0.1;   // upper bound for weight randomization

    // neural nets functional constants;
    public static final double FIRE = 0.999;
    public static final double NEUTRAL = 0.0;
    public static final double DOWN = -0.999;

    ////////////////////////////////////////////////////////////////////
    // Private variables
    ////////////////////////////////////////////////////////////////////

    // When true, structure-building progress is printed to stdout.
    private static boolean trace;

    ////////////////////////////////////////////////////////////////////
    // Constructor
    ////////////////////////////////////////////////////////////////////

    /**
     * Builds a new backpropagation network from scratch.
     *
     * @param layerDef          unit count of each layer (input, hidden..., output);
     *                          must hold exactly one more entry than layerActivatingFn
     *                          (enforced by the size check below)
     * @param layerActivatingFn activation function names, presumably one per
     *                          non-input layer -- passed to setActivationFn
     * @throws BPNException if the two arrays have inconsistent sizes
     */
    public BPN(int layerDef[], String layerActivatingFn[]) throws BPNException{
        if(layerDef.length != layerActivatingFn.length + 1)
            throw new BPNException("BPN: Error in BPN, arguments have different size.");

        // Backpropagation structure type creation
        buildStructure(layerDef);

        // setup layers activating functions
        setActivationFn(layerActivatingFn);

        // randomize network weights
        randomize(0.5);
    }

    /**
     * Restores a previously saved network.
     *
     * NOTE(review): path/name semantics are defined by loadNeuralNetwork,
     * which is not visible in this extract -- presumably directory + file name.
     *
     * @throws BPNException if the saved network cannot be loaded
     */
    public BPN(String path, String name) throws BPNException{
        loadNeuralNetwork(path, name);
    }

    ////////////////////////////////////////////////////////////////////
    // this builds just the structure of the desired network.
    //
    // int layerDef[] specify the layer sizes and must
    // contain at least 2 values
    ////////////////////////////////////////////////////////////////////

    /**
     * Builds the layer and weight-pack structure of the network from the
     * given layer sizes, wiring each layer to its neighbours.
     *
     * @param layerDef unit count per layer; at least 2 entries (input + output),
     *                 every entry >= 1
     * @throws BPNException if fewer than two layers are given or any size is < 1
     */
    private void buildStructure(int layerDef[]) throws BPNException {
        BPNWeightPack weightP = null;  // carries the "lower" pack of layer i into iteration i+1
        hiddenLayer hiddenL;

        if(layerDef.length < 2)
            throw new BPNException("BPN: Error in buildStructure, You must define at least input and output layers.");

        // validate all layer sizes before allocating anything
        for(int i=0; i<layerDef.length; i++)
            if(layerDef[i] < 1)
                throw new BPNException("BPN: Error in buildStructure, negative or null layer length found..");

        allWeights = new BPNWeightPack[layerDef.length-1];
        allLayers = new BPNLayer[layerDef.length];

        for(int i=0; i<layerDef.length; i++){
            if(i==0){
                ///////////////////////////////////////
                // define input layer
                ///////////////////////////////////////
                if(trace) System.out.println("Create input layer with "+layerDef[i]+" units.");
                inputL = new inputLayer(layerDef[i]);
                allLayers[i] = (BPNLayer) inputL;

                // create lower BPNWeightPack
                weightP = new BPNWeightPack(layerDef[i],layerDef[i+1]);
                allWeights[i] = weightP;
                inputL.setLowerWeightPack(weightP);

                // define which kind of layer this is.
                inputL.setLayerKind(BPNLayer.INPUT);
            }else if(i==layerDef.length-1){
                ///////////////////////////////////////
                // define output layer
                ///////////////////////////////////////
                if(trace) System.out.println("Create output layer with "+layerDef[i]+" units.");
                outputL = new outputLayer(layerDef[i]);
                allLayers[i] = (BPNLayer) outputL;

                // attach upper BPNWeightPack (created by the previous iteration)
                outputL.setUpperWeightPack(weightP);

                // attach upper Layer
                outputL.setUpperLayer(allLayers[i-1]);

                // attach this layer as the previous layer's lower layer
                allLayers[i-1].setLowerLayer(allLayers[i]);

                // define which kind of layer this is.
                outputL.setLayerKind(BPNLayer.OUTPUT);
            }else{
                ///////////////////////////////////////
                // define hidden layer
                ///////////////////////////////////////
                if(trace) System.out.println("Create hidden layer with "+layerDef[i]+" units.");
                hiddenL = new hiddenLayer(layerDef[i]);
                allLayers[i] = (BPNLayer) hiddenL;

                // attach upper BPNWeightPack (created by the previous iteration)
                hiddenL.setUpperWeightPack(weightP);

                // create lower BPNWeightPack
                weightP = new BPNWeightPack(layerDef[i],layerDef[i+1]);
                allWeights[i] = weightP;

                // attach lower BPNWeightPack
                hiddenL.setLowerWeightPack(weightP);

                // attach upper layer
                hiddenL.setUpperLayer(allLayers[i-1]);

                // attach this layer as the previous layer's lower layer
                allLayers[i-1].setLowerLayer(allLayers[i]);

                // define which kind of layer this is.
                hiddenL.setLayerKind(BPNLayer.HIDDEN);
            }
        }
    }

    /**
     * Rebuilds the network structure by reading it back from a saved-network
     * file: a version tag, the learning parameters, the layer count, then each
     * layer with its weight pack.
     *
     * NOTE(review): this method is TRUNCATED in the visible extract -- it ends
     * mid-body below; the hidden-layer branch, the closing of the try block,
     * and the method/class terminators are outside this view.
     *
     * @throws BPNException if the file's version tag does not match this class
     */
    private void buildStructure(RandomAccessFile file) throws BPNException {
        BPNWeightPack weightP = null;
        hiddenLayer hiddenL;

        try{
            // file must begin with "<fully-qualified class name><version>"
            if(file.readUTF().compareTo(getClass().getName()+version) != 0){
                throw new BPNException("BPN: Error in buildStructure, unknown version.");
            }

            // restore learning parameters
            learningRate = file.readDouble();
            momentum = file.readDouble();
            initMin = file.readDouble();
            initMax = file.readDouble();

            int nlayers = file.readInt();
            allLayers = new BPNLayer[nlayers];
            allWeights = new BPNWeightPack[nlayers-1];

            for(int i=0; i<nlayers; i++){
                if(i==0){
                    ///////////////////////////////////////
                    // define input layer
                    ///////////////////////////////////////
                    inputL = inputLayer.readFromFile(file);
                    allLayers[i] = (BPNLayer) inputL;

                    // create lower BPNWeightPack
                    weightP = new BPNWeightPack(file);
                    allWeights[i] = weightP;
                    inputL.setLowerWeightPack(weightP);

                    // define which kind of layer this is.
                    inputL.setLayerKind(BPNLayer.INPUT);
                }else if(i==nlayers-1){
                    ///////////////////////////////////////
                    // define output layer
                    ///////////////////////////////////////
                    outputL = outputLayer.readFromFile(file);
                    allLayers[i] = (BPNLayer) outputL;

                    // attach upper BPNWeightPack
                    outputL.setUpperWeightPack(weightP);

                    // attach upper Layer
                    outputL.setUpperLayer(allLayers[i-1]);

                    // attach this layer as the previous layer's lower layer
                    allLayers[i-1].setLowerLayer(allLayers[i]);

                    // define which kind of layer this is.
                    outputL.setLayerKind(BPNLayer.OUTPUT);

                    // setup neural net parameters
// ---------------------------------------------------------------------------
// NOTE(review): the source extract ends (truncated) above this point. The text
// that followed here in the scraped page was web-viewer UI chrome (Chinese
// keyboard-shortcut help: copy, search, fullscreen, theme, font size), not
// part of BPN.java, and has been removed.
// ---------------------------------------------------------------------------